[ { "index": "TUNA_0001", "video_path": "DREAM-1K/201.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 3.63, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera is focused on a young woman wearing a dark coat, silver earrings on her ears, and curly hair, talking to a young man. In the background, the sky is dark, and there is a yellow taxi on the road, which is driving slowly to the right of the screen. The man wears glasses and has a light-colored shirt collar.\nThe taxi in the background continues to move to the right of the picture and disappears. The woman continues to talk to the man. The woman raises her right hand, covers her right eye, then waves to the right, and finally puts it down.\nThe camera starts to follow the woman and shakes slightly to the left. She turns left and waves to the left in front of her to hail a taxi. A second yellow taxi appears in the background and passes in front of the woman. The man gaze follows the woman, looks to the left and then disappears from the screen.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young woman wearing a dark coat, silver earrings on her ears, and curly hair, talking to a young man. In the background, the sky is dark, and there is a yellow taxi on the road, which is driving slowly to the right of the screen. The man wears glasses and has a light-colored shirt collar.", "visual_elements": [ { "content": "The camera is focused on a young woman.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a dark coat.", "type": "attribute", "weight": 2 }, { "content": "The woman is wearing silver earrings on her ears.", "type": "attribute", "weight": 2 }, { "content": "The woman has curly hair tied up.", "type": "attribute", "weight": 2 }, { "content": "The woman is talking to a young man.", "type": "action", "weight": 3 }, { "content": "The sky is dark.", "type": "scene", "weight": 2 }, { "content": "There is a yellow taxi on the road.", "type": "scene", "weight": 2 }, { "content": "The taxi is driving slowly to the right of the screen.", "type": "action", "weight": 1 }, { "content": "There is a man on the right side of the screen with his back to the camera and wearing glasses.", "type": "attribute", "weight": 2 }, { "content": "The man has a light-colored shirt collar.", "type": "attribute", "weight": 2 } ] }, { "event": "The taxi in the background continues to move to the right of the picture and disappears. The woman continues to talk to the man. The woman raises her right hand, covers her right eye, then waves to the right, and finally puts it down.", "visual_elements": [ { "content": "The taxi continues to drive to the right of the screen and disappeared.", "type": "scene", "weight": 1 }, { "content": "The woman continues to talk to the man.", "type": "action", "weight": 3 }, { "content": "The woman raises her right hand and coveres her right eye.", "type": "action", "weight": 3 }, { "content": "The woman then waves to the right.", "type": "action", "weight": 3 }, { "content": "The woman finally puts down her right hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera starts to follow the woman and shakes slightly to the left. She turns left and waves to the left in front of her to hail a taxi. A second yellow taxi appears in the background and passes in front of the woman. 
The man's gaze follows the woman; he looks to the left and then disappears from the screen.", "visual_elements": [ { "content": "The camera begins to follow the woman, panning slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The woman turns to the left.", "type": "action", "weight": 3 }, { "content": "The woman faces forward and waves.", "type": "action", "weight": 3 }, { "content": "The woman hails a taxi.", "type": "action", "weight": 3 }, { "content": "A second yellow taxi appears in the background.", "type": "scene", "weight": 2 }, { "content": "A second yellow taxi passes in front of the woman.", "type": "action", "weight": 1 }, { "content": "The man's gaze follows the woman.", "type": "action", "weight": 1 }, { "content": "The man disappears from the screen.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0002", "video_path": "DREAM-1K/202.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 9.47, "resolution": { "width": 1920, "height": 816 }, "caption": "At the beginning of the video, the camera is focused on a young man wearing a dark coat and a light-colored shirt. The background is a dark forest at night, with trees covered with snow. The man is sitting by the fire, and the firelight shines on his face. He first looks to his right, then lowers his head and blinks at the same time, then looks up to his left.\nThen, the camera switches to a young woman, who is also wearing a dark coat with light-colored clothes underneath. The woman is also sitting by the fire, and the firelight is also reflected on her face. The woman looks at the man with a serious expression.\nThe camera cuts back to the man. He continues to sit by the fire, looks to his right again, starts to speak, then turns to his left and speaks. The firelight is still bright, illuminating the man's silhouette.\nFinally, the camera switches to the woman and a girl. They are sitting side by side by the fire. The girl is also wearing a dark coat. The firelight shines on their faces. Both the woman and the girl lower their heads slightly and look to their left.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young man wearing a dark coat and a light-colored shirt. The background is a dark forest at night, with trees covered with snow. The man is sitting by the fire, and the firelight shines on his face. He first looks to his right, then lowers his head and blinks at the same time, then looks up to his left.", "visual_elements": [ { "content": "The camera is focused on a young man.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a dark coat and a light-colored shirt.", "type": "attribute", "weight": 3 }, { "content": "The background is a dark forest at night.", "type": "scene", "weight": 2 }, { "content": "The trees are covered with snow.", "type": "attribute", "weight": 1 }, { "content": "The man is sitting by the fire.", "type": "action", "weight": 3 }, { "content": "The man looks to his right and then to his left.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera switches to a young woman, who is also wearing a dark coat with light-colored clothes underneath. The woman is also sitting by the fire, and the firelight is also reflected on her face. 
The woman looks at the man with a serious expression.", "visual_elements": [ { "content": "The camera switches to a young woman.", "type": "camera", "weight": 3 }, { "content": "The woman is also wearing a dark coat.", "type": "attribute", "weight": 3 }, { "content": "The woman is wearing light-colored clothes.", "type": "attribute", "weight": 3 }, { "content": "The woman is also sitting by the fire.", "type": "action", "weight": 3 }, { "content": "The woman looks at the man with a serious expression.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the man. He continues to sit by the fire, looks to his right again, starts to speak, then turns to his left and speaks. The firelight is still bright, illuminating the man's silhouette.", "visual_elements": [ { "content": "The camera continues to cut back to the man.", "type": "camera", "weight": 3 }, { "content": "The man continues to sit by the fire.", "type": "action", "weight": 2 }, { "content": "The man looks to his right again.", "type": "action", "weight": 3 }, { "content": "Then the man turns to his left and begins to speak.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera switches to the woman and a girl. They are sitting side by side by the fire. The girl is also wearing a dark coat. The firelight shines on their faces. Both the woman and the girl lower their heads slightly and look to their left.", "visual_elements": [ { "content": "The camera switches to the young woman and a little girl.", "type": "camera", "weight": 3 }, { "content": "The two women sit side by side by the fire.", "type": "action", "weight": 3 }, { "content": "The girl is wearing a dark coat.", "type": "attribute", "weight": 2 }, { "content": "Both the woman and the girl bow their heads slightly.", "type": "action", "weight": 2 }, { "content": "Both the woman and the girl look to their left.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_0003", "video_path": "DREAM-1K/203.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 9.67, "resolution": { "width": 1920, "height": 656 }, "caption": "At the beginning of the video, the camera is focused on a narrow stone road. On the left side of the road is an old building with walls made of gray stones. On the side of the building facing the camera, there are two stone stairs with iron handrails on both sides. Above the stone stairs is a platform with iron railings. In front of the road is an arched passage, outside of which are some pedestrians and buildings.\nThere are four boys in the picture, three of them wearing white long sleeves, dark shorts, white socks, and hats. They are running forward from the left side of the cobblestone road. Behind the three boys, there is a tall boy running forward quickly, trying to catch up with them. The tall boy is wearing a dark hat, light-colored trousers, and no socks. When he is about to catch up with a boy on the right side of the road, the tall boy's hat falls off, so he turns around to pick it up.\nAfter the tall boy, a boy wearing a white long-sleeved shirt and a red vest appears. He appears from the right side of the stairs in the picture and runs down the stairs. The boy in the red vest runs down while holding the handrail with his left hand, and continues to run forward from the left side of the cobblestone road.\nThe boy in the red vest runs away, and then an adult man appears in the picture. 
The adult man is wearing a brown suit, a hat, and holding a brown leather bag. The adult man jogs down from the platform and looks to his right when he reaches the end of the stairs. Then the adult man speeds up and runs forward on the cobblestone road on his left side.", "events": [ { "event": "At the beginning of the video, the camera is focused on a narrow stone road. On the left side of the road is an old building with walls made of gray stones. On the side of the building facing the camera, there are two stone stairs with iron handrails on both sides. Above the stone stairs is a platform with iron railings. In front of the road is an arched passage, outside of which are some pedestrians and buildings.", "visual_elements": [ { "content": "The camera is pointed at a narrow stone road.", "type": "camera", "weight": 3 }, { "content": "On the left side of the road is an old building.", "type": "scene", "weight": 2 }, { "content": "The walls of the building are made of gray stones.", "type": "attribute", "weight": 1 }, { "content": "There are two stone stairs on the side of the building facing the camera.", "type": "scene", "weight": 2 }, { "content": "There are iron handrails on both sides of the stone stairs.", "type": "scene", "weight": 2 }, { "content": "Above the stone stairs is a platform with iron railings.", "type": "scene", "weight": 2 }, { "content": "There is an arched passage in front of the road.", "type": "scene", "weight": 2 }, { "content": "Outside the arched passage are some pedestrians and buildings.", "type": "scene", "weight": 2 } ] }, { "event": "There are four boys in the picture, three of them wearing white long sleeves, dark shorts, white socks, and hats. They are running forward from the left side of the cobblestone road. Behind the three boys, there is a tall boy running forward quickly, trying to catch up with them. The tall boy is wearing a dark hat, light-colored trousers, and no socks. When he is about to catch up with a boy on the right side of the road, the tall boy's hat falls off, so he turns around to pick it up.", "visual_elements": [ { "content": "Four boys appear in the picture.", "type": "scene", "weight": 3 }, { "content": "Three boys are wearing white long sleeves, dark shorts, white socks, and hats.", "type": "attribute", "weight": 2 }, { "content": "The three boys are running forward quickly from the left side of the cobblestone road.", "type": "action", "weight": 3 }, { "content": "A tall boy is running forward quickly behind the three boys.", "type": "action", "weight": 3 }, { "content": "The tall boy is wearing a dark hat, light-colored trousers, and no socks.", "type": "attribute", "weight": 2 }, { "content": "The tall boy's hat falls off.", "type": "attribute", "weight": 2 }, { "content": "The tall boy turns around to pick up the hat.", "type": "action", "weight": 2 } ] }, { "event": "After the tall boy, a boy wearing a white long-sleeved shirt and a red vest appears. He appears from the right side of the stairs in the picture and runs down the stairs. 
The boy in the red vest runs down while holding the handrail with his left hand, and continues to run forward from the left side of the cobblestone road.", "visual_elements": [ { "content": "Behind the tall boy, another boy appears in the picture.", "type": "camera", "weight": 3 }, { "content": "The boy is wearing a white long-sleeved shirt, a red vest, and a hat.", "type": "attribute", "weight": 3 }, { "content": "The boy runs down the stairs while holding the handrail with his left hand.", "type": "action", "weight": 3 }, { "content": "The boy continues to run forward from the left side of the cobblestone road.", "type": "action", "weight": 3 } ] }, { "event": "The boy in the red vest runs away, and then an adult man appears in the picture. The adult man is wearing a brown suit, a hat, and holding a brown leather bag. The adult man jogs down from the platform and looks to his right when he reaches the end of the stairs. Then the adult man speeds up and runs forward on the cobblestone road on his left side.", "visual_elements": [ { "content": "The boy in the red vest runs away, and then an adult man appears in the picture.", "type": "camera", "weight": 3 }, { "content": "The adult man is wearing a brown suit and a hat.", "type": "attribute", "weight": 2 }, { "content": "The adult man is holding a brown leather bag.", "type": "attribute", "weight": 2 }, { "content": "The adult man jogs down from the landing.", "type": "action", "weight": 3 }, { "content": "When the adult man reaches the end of the stairs, he glances to his right.", "type": "action", "weight": 3 }, { "content": "The adult man then speeds up and runs forward on the cobblestones on his left side.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0004", "video_path": "DREAM-1K/204.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 19.68, "resolution": { "width": 1920, "height": 816 }, "caption": "The camera is focused on a busy city street. It is dark and there is a silver car in the picture. The license plate number is \"USA 1\". The silver car is driving on the street. The silver car turns left. There is a red double-decker bus in the background. There are advertisements on the body of the red double-decker bus. The front of the red double-decker bus shows the route \"159\". There are several black taxis and some pedestrians on both sides of the street.\nThe camera moves to the right and rear. The silver car continues to drive. More vehicles and pedestrians wearing heavy clothes appear in the background.\nNext, the camera retreats and the silver car continues to drive. Big Ben and the Houses of Parliament appear in the picture. There is a constant flow of cars on the street and the traffic light is green.\nThe camera switches to a residential area. The silver car appears from the left side of the picture and turns left into a small road. The small road is lined with dense trees and green belts. The car moves slowly along the small road. The camera shoots through the gap in the iron railing. The red brick exterior wall and white window frames of the mansion can be seen in the gap.\nFinally, the camera moves to the upper right, and the front of the red brick building gradually emerges. In front of the camera is a stone bridge. The building has three floors, tall windows and a green door.", "events": [ { "event": "The camera is focused on a busy city street. It is dark and there is a silver car in the picture. 
The license plate number is \"USA 1\". The silver car is driving on the street. The silver car turns left. There is a red double-decker bus in the background. There are advertisements on the body of the red double-decker bus. The front of the red double-decker bus shows the route \"159\". There are several black taxis and some pedestrians on both sides of the street.", "visual_elements": [ { "content": "The camera is focused on a busy city street.", "type": "camera", "weight": 3 }, { "content": "The sky is dark.", "type": "scene", "weight": 2 }, { "content": "There is a silver car in the picture.", "type": "attribute", "weight": 3 }, { "content": "The license plate number of the silver car is \"USA 1\".", "type": "attribute", "weight": 1 }, { "content": "The silver car is driving on the street.", "type": "action", "weight": 3 }, { "content": "The silver car turns left.", "type": "attribute", "weight": 2 }, { "content": "There is a red double-decker bus in the background.", "type": "scene", "weight": 2 }, { "content": "There are advertisements on the body of the red double-decker bus.", "type": "attribute", "weight": 1 }, { "content": "There are several black taxis and some pedestrians on both sides of the street.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves to the right and rear. The silver car continues to drive. More vehicles and pedestrians wearing heavy clothes appear in the background.", "visual_elements": [ { "content": "The camera moves to the right and rear.", "type": "camera", "weight": 3 }, { "content": "The silver car continues to drive.", "type": "action", "weight": 2 }, { "content": "More vehicles appear in the background.", "type": "scene", "weight": 1 }, { "content": "The pedestrians are wearing heavy clothes.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera retreats and the silver car continues to drive. Big Ben and the Houses of Parliament appear in the picture. There is a constant flow of cars on the street and the traffic light is green.", "visual_elements": [ { "content": "The camera moves back.", "type": "camera", "weight": 3 }, { "content": "The silver car continues to drive.", "type": "action", "weight": 2 }, { "content": "Big Ben and the Houses of Parliament appear in the picture.", "type": "scene", "weight": 2 }, { "content": "There is a constant flow of cars on the street.", "type": "scene", "weight": 1 }, { "content": "The traffic light shows green.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches to a residential area. The silver car appears from the left side of the picture and turns left into a small road. The small road is lined with dense trees and green belts. The car moves slowly along the small road. The camera shoots through the gap in the iron railing. 
The red brick exterior wall and white window frames of the mansion can be seen in the gap.", "visual_elements": [ { "content": "The camera switches to a residential area.", "type": "camera", "weight": 3 }, { "content": "The silver car appears from the left side of the picture.", "type": "attribute", "weight": 3 }, { "content": "The silver car turns left and drives into a small road.", "type": "action", "weight": 3 }, { "content": "The small road is lined with dense trees and green belts.", "type": "scene", "weight": 1 }, { "content": "The car moves slowly along the small road to the right side of the picture.", "type": "action", "weight": 3 }, { "content": "The camera shoots through the gap of the iron railings.", "type": "camera", "weight": 3 }, { "content": "The gap contains the red brick exterior wall and white window frames of the house.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera moves to the upper right, and the front of the red brick building gradually emerges. In front of the camera is a stone bridge. The building has three floors, tall windows and a green door.", "visual_elements": [ { "content": "The camera moves to the upper right, and the front of the red brick building gradually emerges.", "type": "camera", "weight": 3 }, { "content": "In front of the camera is a stone bridge.", "type": "scene", "weight": 3 }, { "content": "The building has tall windows and a green gate.", "type": "attribute", "weight": 2 } ] } ], "n_events": 5, "n_elements": 28 }, { "index": "TUNA_0005", "video_path": "DREAM-1K/205.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 8.22, "resolution": { "width": 3840, "height": 2064 }, "caption": "The video begins with the camera panning sideways at two men. Both men are sitting in rattan rocking chairs with two glasses of lemonade between them. The man closer to the camera is wearing a brown suit, white shirt, and tie. The man farther from the camera is wearing a white shirt and dark blue vest with dark pants. In the background behind them is the front porch of a white wooden house with a door. In the distance is an open wilderness with some trees and bushes.\nA little girl in a blue dress appears on the right side of the screen. The little girl quickly runs past the two men on the left. The little girl stands at the door, holding the door handle with her right hand. The little girl stops opening the door and looks back. The wind blows from the left side of the little girl. A little boy follows behind the little girl. The little boy is wearing a blue shirt and gray overalls. The little boy stops and looks at the man in the vest. The man in the vest gestures to the little boy with his left hand. The little boy walks towards the man in the vest.\nThe camera switches to the man in the vest. He touches the boy's chin with his left hand, then puts his left hand behind the boy's neck, and finally puts his right hand on the boy's shoulder. The little girl first lifts her hair, then crosses her hands in front of her chest. The little girl keeps watching this scene.", "events": [ { "event": "The video begins with the camera panning sideways at two men. Both men are sitting in rattan rocking chairs with two glasses of lemonade between them. The man closer to the camera is wearing a brown suit, white shirt, and tie. The man farther from the camera is wearing a white shirt and dark blue vest with dark pants. 
In the background behind them is the front porch of a white wooden house with a door. In the distance is an open wilderness with some trees and bushes.", "visual_elements": [ { "content": "The camera is pointed at two men from the side.", "type": "camera", "weight": 3 }, { "content": "Both men are sitting in rattan rocking chairs.", "type": "action", "weight": 3 }, { "content": "Two glasses of lemonade are placed between the two men.", "type": "scene", "weight": 2 }, { "content": "The man closer to the camera is wearing a brown suit, white shirt and tie.", "type": "attribute", "weight": 2 }, { "content": "The man farther from the camera is wearing a white shirt, dark blue vest and dark pants.", "type": "attribute", "weight": 2 }, { "content": "Behind the two men is the front porch of a white wooden house.", "type": "scene", "weight": 2 }, { "content": "There are some trees and bushes in the distance.", "type": "scene", "weight": 2 } ] }, { "event": "A little girl in a blue dress appears on the right side of the screen. The little girl quickly runs past the two men on the left. The little girl stands at the door, holding the door handle with her right hand. The little girl stops opening the door and looks back. The wind blows from the left side of the little girl. A little boy follows behind the little girl. The little boy is wearing a blue shirt and gray overalls. The little boy stops and looks at the man in the vest. The man in the vest gestures to the little boy with his left hand. The little boy walks towards the man in the vest.", "visual_elements": [ { "content": "A little girl in a blue dress appears on the right side of the screen.", "type": "camera", "weight": 3 }, { "content": "The little girl is standing at the door.", "type": "attribute", "weight": 2 }, { "content": "The little girl runs quickly past the two men from the left.", "type": "action", "weight": 3 }, { "content": "The little girl holds the door handle with her right hand.", "type": "action", "weight": 3 }, { "content": "The little girl stops opening the door and looks back.", "type": "action", "weight": 3 }, { "content": "The wind blows from the little girl's left.", "type": "scene", "weight": 2 }, { "content": "A little boy follows behind the little girl.", "type": "camera", "weight": 3 }, { "content": "The little boy is wearing a blue shirt and gray dungarees.", "type": "attribute", "weight": 3 }, { "content": "The little boy stops and looks at the man in the vest.", "type": "action", "weight": 3 }, { "content": "The man in the vest gestures to the little boy with his left hand.", "type": "action", "weight": 3 }, { "content": "The little boy walks towards the man in the vest.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the man in the vest. He touches the boy's chin with his left hand, then puts his left hand behind the boy's neck, and finally puts his right hand on the boy's shoulder. The little girl first lifts her hair, then crosses her hands in front of her chest. 
The little girl keeps watching this scene.", "visual_elements": [ { "content": "The camera switches to the man in the vest.", "type": "camera", "weight": 3 }, { "content": "The man in the vest touches the boy's chin with his left hand, then puts his left hand behind the boy's neck, and finally puts his right hand on the boy's shoulder.", "type": "action", "weight": 3 }, { "content": "The little girl first lifts her hair, then crosses her hands in front of her chest.", "type": "action", "weight": 2 }, { "content": "The little girl keeps watching this scene.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0006", "video_path": "DREAM-1K/206.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 11.97, "resolution": { "width": 3840, "height": 2064 }, "caption": "At the beginning of the video, the camera is aimed at a man lying on a bed. He is wearing a white, worn vest and lying on an iron bed. The background is a room in the prison, with dim light. The man holds up a magazine with both hands, and the magazine covers his face. Then the man puts the magazine on his chest, with four fingers wrapped in bandages. His eyes close and then open again, and he raises his head to the right front.\nThe camera starts to follow the man's movements, shooting from behind and moving to the upper left. He throws the book to his left, then quickly stands up from the bed holding the railing, walks to the railing, and looks to his left. Opposite the man in the background is a similar prison room closed by an iron door.\nOutside the railing, a man in a black uniform appears from the left side of the screen. He holds a white object in his left hand, looks forward, and walks straight to the right in front of the man. The camera shakes to the right.", "events": [ { "event": "At the beginning of the video, the camera is aimed at a man lying on a bed. He is wearing a white, worn vest and lying on an iron bed. The background is a room in the prison, with dim light. The man holds up a magazine with both hands, and the magazine covers his face. Then the man puts the magazine on his chest, with four fingers wrapped in bandages. His eyes close and then open again, and he raises his head to the right front.", "visual_elements": [ { "content": "The camera takes a bird's-eye view of a man lying on a bed.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a tattered white vest.", "type": "attribute", "weight": 2 }, { "content": "The background is a small room with iron bars in the prison.", "type": "scene", "weight": 3 }, { "content": "The light is dim.", "type": "scene", "weight": 2 }, { "content": "The man holds a magazine in both hands.", "type": "attribute", "weight": 2 }, { "content": "The magazine covers the man's face.", "type": "attribute", "weight": 2 }, { "content": "The man then puts the magazine on his chest.", "type": "action", "weight": 3 }, { "content": "The man has a bandage on his left hand.", "type": "attribute", "weight": 2 }, { "content": "The man closes his eyes and then opens them again.", "type": "action", "weight": 1 }, { "content": "The man raises his head to the right front of him.", "type": "action", "weight": 2 } ] }, { "event": "The camera starts to follow the man's movements, shooting from behind and moving to the upper left. He throws the book to his left, then quickly stands up from the bed holding the railing, walks to the railing, and looks to his left. 
Opposite the man in the background is a similar prison room closed by an iron door.", "visual_elements": [ { "content": "The camera starts to follow the man's movements, shooting from behind and moving to the upper left.", "type": "camera", "weight": 3 }, { "content": "The man throws the book to his left.", "type": "action", "weight": 2 }, { "content": "The man quickly stands up from the bed holding the railing and walks to the railing.", "type": "action", "weight": 3 }, { "content": "The man looks to his left, outside the railing.", "type": "action", "weight": 2 }, { "content": "In the background, opposite the man are other rooms in the prison.", "type": "scene", "weight": 2 } ] }, { "event": "Outside the railing, a man in a black uniform appears from the left side of the screen. He holds a white object in his left hand, looks forward, and walks straight to the right in front of the man. The camera shakes to the right.", "visual_elements": [ { "content": "A man in a black uniform appears from the left side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The uniformed man holds a white object in his left hand.", "type": "action", "weight": 3 }, { "content": "The uniformed man looks forward and walks straight to the right in front of the man.", "type": "action", "weight": 3 }, { "content": "The camera shakes to the right.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0007", "video_path": "DREAM-1K/207.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Subject", "domain": "Film", "duration": 4.42, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with the camera focusing on an adult man. He is wearing a dark suit, white shirt and dark tie. The background is blurred and there are many people behind the man. The man stands face to face with a woman and smiles at her.\nThe camera then switches to the woman. She is a young woman with long blond hair and wearing dark clothes. She is talking to the man. The background is a relatively spacious public scene. Behind the woman, many people in different clothes gather together, and some are happily talking.\nThe woman leans forward, closes her eyes, and moves her head closer to the man. The man also moves his head closer to the woman, ready to give her a kiss.", "events": [ { "event": "The video begins with the camera focusing on an adult man. He is wearing a dark suit, white shirt and dark tie. The background is blurred and there are many people behind the man. The man stands face to face with a woman and smiles at her.", "visual_elements": [ { "content": "The camera is focused on an adult man.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a dark suit and a white shirt.", "type": "attribute", "weight": 2 }, { "content": "The man is wearing a dark tie.", "type": "attribute", "weight": 2 }, { "content": "The background is blurred, and there are many people behind the man.", "type": "scene", "weight": 2 }, { "content": "The man and an adult woman are standing face to face.", "type": "attribute", "weight": 2 }, { "content": "The man smiles at the woman.", "type": "action", "weight": 3 } ] }, { "event": "The camera then switches to the woman. She is a young woman with long blond hair and wearing dark clothes. She is talking to the man. The background is a relatively spacious public scene. 
Behind the woman, many people in different clothes gather together, and some are happily talking.", "visual_elements": [ { "content": "The camera then switches to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman has long blond hair.", "type": "attribute", "weight": 2 }, { "content": "The woman is wearing dark clothes.", "type": "attribute", "weight": 2 }, { "content": "The woman is talking to the man.", "type": "action", "weight": 3 }, { "content": "The background is a relatively spacious public scene.", "type": "scene", "weight": 2 }, { "content": "There are many people in different clothes gathered together.", "type": "scene", "weight": 2 }, { "content": "Some people are happily talking.", "type": "scene", "weight": 1 } ] }, { "event": "The woman leans forward, closes her eyes, and moves her head closer to the man. The man also moves his head closer to the woman, ready to give her a kiss.", "visual_elements": [ { "content": "The woman leans forward.", "type": "action", "weight": 2 }, { "content": "The woman closes her eyes.", "type": "action", "weight": 1 }, { "content": "The woman leans her head towards the man.", "type": "action", "weight": 1 }, { "content": "The man also leans his head towards the woman.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0008", "video_path": "DREAM-1K/208.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 14.22, "resolution": { "width": 3840, "height": 1600 }, "caption": "At the beginning of the video, the camera is focused on a man with a thick beard wearing a dark suit and a top hat. The background is a stage of a classical theater with many audiences. Above the stage, a man in blue clothes is suspended in the air, and a spotlight shines on him.\nThe man in the tuxedo holds one end of his hat in his right hand and runs forward while swinging his left hand. The man in the tuxedo then puts his right hand down. The man in the tuxedo looks very confident and calm. The audience around him applauds the man in the tuxedo. A white spotlight follows the man in the tuxedo all the time.\nThe camera gradually moves down. The man in the tuxedo stops. The man in the tuxedo opens his arms and speaks to the audience. The man in the tuxedo then takes off his hat with his right hand. Some of the audience behind the man in the tuxedo turn around and applaud him.\nThe man in the tuxedo turns to face the stage and swings his hat forward with his right hand. The man in the tuxedo then claps his hands. The man in the tuxedo turns back to the camera and speaks. After the man in the tuxedo leaves the camera, the auditorium goes dark.", "events": [ { "event": "At the beginning of the video, the camera is focused on a man with a thick beard wearing a dark suit and a top hat. The background is a stage of a classical theater with many audiences. 
Above the stage, a man in blue clothes is suspended in the air, and a spotlight shines on him.", "visual_elements": [ { "content": "The camera is focused on a man with a thick beard and wearing a dark suit and a top hat.", "type": "camera", "weight": 3 }, { "content": "The background is a stage of a classical theater.", "type": "scene", "weight": 2 }, { "content": "There are many audience members in the seats.", "type": "scene", "weight": 2 }, { "content": "Above the stage, a man in blue clothes is suspended in the air.", "type": "scene", "weight": 2 }, { "content": "The spotlight shines on the suspended man.", "type": "scene", "weight": 1 } ] }, { "event": "The man in the tuxedo holds one end of his hat in his right hand and runs forward while swinging his left hand. The man in the tuxedo then puts his right hand down. The man in the tuxedo looks very confident and calm. The audience around him applauds the man in the tuxedo. A white spotlight follows the man in the tuxedo all the time.", "visual_elements": [ { "content": "The man in the tuxedo holds one end of his hat in his right hand and runs forward while swinging his left hand.", "type": "action", "weight": 3 }, { "content": "The man in the tuxedo then puts his right hand down.", "type": "action", "weight": 1 }, { "content": "The man in the tuxedo looks very confident and calm.", "type": "attribute", "weight": 2 }, { "content": "The audience around him applauds the man in the tuxedo.", "type": "scene", "weight": 1 }, { "content": "A white spotlight follows the man in the tuxedo all the time.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera gradually moves down. The man in the tuxedo stops. The man in the tuxedo opens his arms and speaks to the audience. The man in the tuxedo then takes off his hat with his right hand. Some of the audience behind the man in the tuxedo turn around and applaud him.", "visual_elements": [ { "content": "The camera gradually moves down.", "type": "camera", "weight": 3 }, { "content": "The man in the tuxedo stops.", "type": "action", "weight": 2 }, { "content": "The man in the tuxedo opens his arms and speaks to the audience.", "type": "action", "weight": 3 }, { "content": "The man in the tuxedo then takes off his hat with his right hand.", "type": "action", "weight": 2 }, { "content": "Some of the audience behind the man in the tuxedo turn around and applaud him.", "type": "scene", "weight": 1 } ] }, { "event": "The man in the tuxedo turns to face the stage and swings his hat forward with his right hand. The man in the tuxedo then claps his hands. The man in the tuxedo turns back to the camera and speaks. 
After the man in the tuxedo leaves the camera, the auditorium goes dark.", "visual_elements": [ { "content": "The man in the tuxedo turns to face the stage and swings his hat forward with his right hand.", "type": "action", "weight": 2 }, { "content": "The man in the tuxedo then claps his hands.", "type": "action", "weight": 1 }, { "content": "The man in the tuxedo turns back to the camera and speaks.", "type": "action", "weight": 3 }, { "content": "After the man in the tuxedo leaves the camera, the auditorium goes dark.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0009", "video_path": "DREAM-1K/209.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Film", "duration": 5.88, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera shakes and follows a soldier holding a weapon from behind the character. The background is a darker indoor environment, and light comes in from the broken window in front of the soldier, illuminating part of the area. The camera quickly shakes to the right of the soldier. The soldier moves forward with a gun, approaches and walks through a door frame. The center of the picture is always a fictitious gun bull's eye, and the atmosphere is very tense.\nThen, the camera switches to a row of soldiers and quickly moves to the right. These soldiers are equipped with weapons and equipment. The indoor environment is dim, with only a small amount of natural light coming in from the broken window behind them, so only the general outline of the soldiers can be seen.\nFinally, the camera switches to a room corridor. The corridor is dimly lit, and light shines into the room from the left side of the screen, illuminating part of the area. At the end of the corridor is a door and an old white wall, with bright light.", "events": [ { "event": "At the beginning of the video, the camera shakes and follows a soldier holding a weapon from behind the character. The background is a darker indoor environment, and light comes in from the broken window in front of the soldier, illuminating part of the area. The camera quickly shakes to the right of the soldier. The soldier moves forward with a gun, approaches and walks through a door frame. The center of the picture is always a fictitious gun bull's eye, and the atmosphere is very tense.", "visual_elements": [ { "content": "The camera shakes, following a soldier holding a weapon from behind the character.", "type": "camera", "weight": 3 }, { "content": "The background is a relatively dim indoor environment.", "type": "scene", "weight": 2 }, { "content": "The light comes in from the broken window in front of the soldier, illuminating part of the area.", "type": "scene", "weight": 2 }, { "content": "The camera quickly shakes to the right of the soldier.", "type": "camera", "weight": 3 }, { "content": "The soldier moves forward with a gun, approaches and walks through a door frame.", "type": "action", "weight": 3 }, { "content": "The center of the picture has always been a fictitious gun bullseye.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to a row of soldiers and quickly moves to the right. These soldiers are equipped with weapons and equipment. 
The indoor environment is dim, with only a small amount of natural light coming in from the broken window behind them, so only the general outline of the soldiers can be seen.", "visual_elements": [ { "content": "The camera switches to a row of soldiers from the front and moves quickly to the right.", "type": "camera", "weight": 3 }, { "content": "This row of soldiers is carrying weapons and equipment.", "type": "attribute", "weight": 2 }, { "content": "The indoor environment is dim.", "type": "scene", "weight": 2 }, { "content": "Only a small amount of natural light comes in from the broken window behind them.", "type": "scene", "weight": 1 }, { "content": "Only the general outline of the soldiers can be seen in the picture.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the camera switches to a room corridor. The corridor is dimly lit, and light shines into the room from the left side of the screen, illuminating part of the area. At the end of the corridor is a door and an old white wall, with bright light.", "visual_elements": [ { "content": "The camera switches to a room passage.", "type": "camera", "weight": 3 }, { "content": "The light in the passage is dim.", "type": "scene", "weight": 2 }, { "content": "The light shines into the room from the left side of the picture, illuminating part of the area.", "type": "scene", "weight": 1 }, { "content": "At the end of the passage is a door and an old white wall.", "type": "scene", "weight": 2 }, { "content": "The light is bright at the end of the passage.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0010", "video_path": "DREAM-1K/210.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.5, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera is focused on the right side of a man's face behind a tree trunk. On the left side of the screen, a thin layer of snow covers the branches and trunk. The man is hiding behind the tree, staring forward. He is wearing a dark knitted hat and a brown jacket, with only part of his right face exposed.\nThe camera switches to the man behind the tree. Another adult man is seen in the gap between the trees. The adult man is wearing a light brown jacket and light blue jeans. The adult man is holding a pile of firewood in his right hand. The adult man is walking forward in the snow. The camera switches back to the man in the hat. The man in the hat quickly turns his head to his left and is completely hidden behind the tree trunk.\nThe camera switches again, zooming out slightly. The man in the hat is on the left side of the screen, hiding behind a thick tree trunk. The camera moves to the lower left, revealing his hands and the body parts below. He holds the gun in both hands and points it to his left. On the right side of the screen, the face of the adult man is revealed. He is a young man with curly hair and a thick beard.\nThe camera switches to the view from behind the man in the hat. The man in the hat moves the muzzle of the gun to the right and points it at the man with curly hair. The man with curly hair suddenly stops, turns to his right side with a smile, and shows the right side of his face to the man in the hat.", "events": [ { "event": "At the beginning of the video, the camera is focused on the right side of a man's face behind a tree trunk. 
On the left side of the screen, a thin layer of snow covers the branches and trunk. The man is hiding behind the tree, staring forward. He is wearing a dark knitted hat and a brown jacket, with only part of his right face exposed.", "visual_elements": [ { "content": "The camera is aimed at the right face of a middle-aged man hiding behind a tree.", "type": "camera", "weight": 3 }, { "content": "The branches and trunks are covered with a thin layer of snow.", "type": "scene", "weight": 1 }, { "content": "The man is hiding behind the tree, his eyes staring forward.", "type": "attribute", "weight": 2 }, { "content": "The man is wearing a dark knitted hat and a brown jacket.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to the man behind the tree. Another adult man is seen in the gap between the trees. The adult man is wearing a light brown jacket and light blue jeans. The adult man is holding a pile of firewood in his right hand. The adult man is walking forward in the snow. The camera switches back to the man in the hat. The man in the hat quickly turns his head to his left and is completely hidden behind the tree trunk.", "visual_elements": [ { "content": "The camera switches to the man's perspective behind the tree.", "type": "camera", "weight": 3 }, { "content": "Another adult man is photographed in the gap between the trees.", "type": "attribute", "weight": 2 }, { "content": "The adult man is wearing a light brown jacket and light blue jeans.", "type": "attribute", "weight": 2 }, { "content": "The adult man is holding a pile of firewood in his right hand.", "type": "attribute", "weight": 2 }, { "content": "The adult man is walking forward in the snow.", "type": "action", "weight": 3 }, { "content": "The camera switches back to the man in the hat again.", "type": "camera", "weight": 3 }, { "content": "The man in the hat quickly turns his head to his left, completely hiding behind the tree trunk.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches again, zooming out slightly. The man in the hat is on the left side of the screen, hiding behind a thick tree trunk. The camera moves to the lower left, revealing his hands and the body parts below. He holds the gun in both hands and points it to his left. On the right side of the screen, the face of the adult man is revealed. He is a young man with curly hair and a thick beard.", "visual_elements": [ { "content": "The camera switches again, and the camera is slightly farther away.", "type": "camera", "weight": 3 }, { "content": "The man in the hat is on the left side of the screen, hiding behind a thick tree trunk.", "type": "attribute", "weight": 2 }, { "content": "The camera moves to the lower left, revealing the man in the hat's hand and the body below.", "type": "camera", "weight": 3 }, { "content": "The man in the hat holds the gun with both hands and points it to his left.", "type": "action", "weight": 1 }, { "content": "The adult man has curly hair and a thick beard.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to the view from behind the man in the hat. The man in the hat moves the muzzle of the gun to the right and points it at the man with curly hair. 
The man with curly hair suddenly stops, turns to his right side with a smile, and shows the right side of his face to the man in the hat.", "visual_elements": [ { "content": "The camera switches to the perspective behind the man in the hat.", "type": "camera", "weight": 3 }, { "content": "The man in the hat moves the muzzle of the gun to the right and aims at the curly-haired man.", "type": "action", "weight": 3 }, { "content": "The curly-haired man suddenly stops.", "type": "action", "weight": 2 }, { "content": "The curly-haired man smiles and turns to his right, showing the right side of his face to the man in the hat.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_0011", "video_path": "DREAM-1K/211.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 7.25, "resolution": { "width": 1280, "height": 544 }, "caption": "At the beginning of the video, the camera is focused on an adult. The adult is wearing a work suit with shoulder straps, a pair of goggles that emit yellow-green light, and is covered in dirt and paint. The background is a dim cave with glowing plants hanging on the walls, emitting green and pink light. The camera shakes slightly. Several lights flash to the left of the adult, and he quickly turns his head to his left.\nThen, the camera switches to a little boy on the right side of the adult. He is wearing a denim vest with a skull pattern and a pair of goggles that emit yellow-green light. The boy's face and clothes are also covered with dirt and paint. The boy turns his head to his left in panic.\nThe camera shakes violently and switches to a goggle view. The picture is full of green plants. The plants in the center of the field of view shake.\nThe camera switches back to the adult again, and quickly shakes along with the adult's movements. The adult looks forward for a few seconds, then lowers his head to find a red tool under the table, then turns to his right and walks towards the boy, handing him the tool. The boy looks at the adult with wide eyes, looking a little nervous and scared. The adult turns his back to the boy.", "events": [ { "event": "At the beginning of the video, the camera is focused on an adult. The adult is wearing a work suit with shoulder straps, a pair of goggles that emit yellow-green light, and is covered in dirt and paint. The background is a dim cave with glowing plants hanging on the walls, emitting green and pink light. The camera shakes slightly. 
Several lights flash to the left of the adult, and he quickly turns his head to his left.", "visual_elements": [ { "content": "The camera is focused on an adult.", "type": "camera", "weight": 3 }, { "content": "The adult is wearing a work suit with shoulder straps and a pair of goggles that emit yellow-green light.", "type": "attribute", "weight": 2 }, { "content": "The adult is covered in dirt and paint.", "type": "attribute", "weight": 2 }, { "content": "The background is a dark cave with glowing plants hanging on the walls.", "type": "scene", "weight": 2 }, { "content": "The plants emit green and pink light.", "type": "scene", "weight": 1 }, { "content": "The camera shakes slightly.", "type": "camera", "weight": 3 }, { "content": "There are several flashes of light to the left of the adult.", "type": "scene", "weight": 2 }, { "content": "The adult turns his head quickly to his left.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera switches to a little boy on the right side of the adult. He is wearing a denim vest with a skull pattern and a pair of goggles that emit yellow-green light. The boy's face and clothes are also covered with dirt and paint. The boy turns his head to his left in panic.", "visual_elements": [ { "content": "The camera switches to a little boy to the right of the adult.", "type": "camera", "weight": 3 }, { "content": "The boy is wearing a denim vest with a skull pattern and a pair of goggles that emit yellow-green light.", "type": "attribute", "weight": 2 }, { "content": "The boy's face and clothes are covered in dirt and paint.", "type": "attribute", "weight": 2 }, { "content": "The boy turns his head to his left in panic.", "type": "action", "weight": 3 } ] }, { "event": "The camera shakes violently and switches to a goggle view. The picture is full of green plants. The plants in the center of the field of view shake.", "visual_elements": [ { "content": "The camera shakes violently and switches to a goggle view.", "type": "camera", "weight": 3 }, { "content": "The picture is full of green plants.", "type": "attribute", "weight": 3 }, { "content": "The plants in the center of the field of view shake.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches back to the adult again, and quickly shakes along with the adult's movements. The adult looks forward for a few seconds, then lowers his head to find a red tool under the table, then turns to his right and walks towards the boy, handing him the tool. The boy looks at the adult with wide eyes, looking a little nervous and scared. 
The adult turns his back to the boy.", "visual_elements": [ { "content": "The camera switches back to the adult again, and shakes quickly following the adult's movements.", "type": "camera", "weight": 3 }, { "content": "The adult looks forward for a few seconds.", "type": "action", "weight": 1 }, { "content": "The adult then lowers his head and finds a red tool under the table.", "type": "action", "weight": 3 }, { "content": "The adult turns to his right and walks towards the boy, handing the tool to him.", "type": "attribute", "weight": 3 }, { "content": "The boy looks at the adult with wide eyes.", "type": "action", "weight": 3 }, { "content": "The boy looks a little nervous and scared.", "type": "attribute", "weight": 1 }, { "content": "The adult turns his back to the boy.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 22 }, { "index": "TUNA_0012", "video_path": "DREAM-1K/212.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 7.12, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with the camera focused on the face of a young woman. Her hair is brown and falls on her shoulders. She is looking at a blurry object in front of her. The background is blurred, showing a green environment. The camera shakes and pulls away.\nNext, the camera switches to a leafy sky, with branches and leaves interlaced, and sunlight shining through the leaves, casting mottled shadows.\nThe camera slowly moves to the lower left, and a man appears on the left side of the screen. The man is facing the camera sideways, has brown hair, wears dark clothes, and looks up at the sky in the distance. The background is green trees and sky. Finally, the camera gradually rotates to the right and stays behind the man.", "events": [ { "event": "The video begins with the camera focused on the face of a young woman. Her hair is brown and falls on her shoulders. She is looking at a blurry object in front of her. The background is blurred, showing a green environment. The camera shakes and pulls away.", "visual_elements": [ { "content": "The camera is focused on the face of a young woman.", "type": "camera", "weight": 3 }, { "content": "The woman has brown hair that falls on her shoulders.", "type": "attribute", "weight": 2 }, { "content": "The woman is looking at a blurry object in front of her.", "type": "action", "weight": 3 }, { "content": "The background is blurry, showing a green environment.", "type": "scene", "weight": 2 }, { "content": "The camera shakes and pulls away.", "type": "camera", "weight": 3 } ] }, { "event": "Next, the camera switches to a leafy sky, with branches and leaves interlaced, and sunlight shining through the leaves, casting mottled shadows.", "visual_elements": [ { "content": "The camera switches to a leafy sky.", "type": "camera", "weight": 3 }, { "content": "The branches and leaves are intertwined.", "type": "scene", "weight": 1 }, { "content": "The sun shines through the leaves, casting mottled light and shadows.", "type": "scene", "weight": 2 } ] }, { "event": "The camera slowly moves to the lower left, and a man appears on the left side of the screen. The man is facing the camera sideways, has brown hair, wears dark clothes, and looks up at the sky in the distance. The background is green trees and sky. 
Finally, the camera gradually rotates to the right and stays behind the man.", "visual_elements": [ { "content": "The camera moves slowly to the lower left.", "type": "camera", "weight": 3 }, { "content": "A man appears on the left side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The man is facing the camera sideways, has brown hair, and wears dark clothes.", "type": "attribute", "weight": 2 }, { "content": "The man is looking up at the sky in the distance.", "type": "action", "weight": 3 }, { "content": "The background is green trees and sky.", "type": "scene", "weight": 2 }, { "content": "The camera rotates around the man and moves to the right and back, staying behind the man.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0013", "video_path": "DREAM-1K/213.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 5.12, "resolution": { "width": 1904, "height": 1024 }, "caption": "The camera is aimed at a girl and a baby elephant, shooting from bottom to top. The background is a dim environment, illuminated by the fire. The girl has long hair and wears a light blue dress. The girl is standing on a ladder. The baby elephant is standing on a high platform. The baby elephant's face is dressed up like a clown, with a white face and red nose, and a hat with the words \"FIRE6\" on its head. The baby elephant has a light-colored cape around its neck and looks worried and nervous. The girl holds the ladder railing with her left hand and hands a white feather to the baby elephant with her right hand. The baby elephant successfully takes the feather with its nose.\nNext, the camera switches to two ropes at a certain point on the ladder. The inner rope begins to burn and gradually breaks. The camera then switches back to the girl. The little girl is smiling and stretching her right hand forward, then suddenly lowers her head.\nThe camera zooms out to show the entire scene. The elephant is standing on the top of a tall tower, and the windows on the third floor of the tower are on fire. The girl falls off the ladder. The elephant looks at the girl falling, and sways his body and quickly backs away.\nThe camera switches to the audience seats under the tower. The audience in the background sits in their seats, looking at the situation on the top of the tower with nervous expressions. In the foreground of the camera, a clown in a yellow uniform stands next to the device under the tower. The clown quickly runs to the left side of the screen.", "events": [ { "event": "The camera is aimed at a girl and a baby elephant, shooting from bottom to top. The background is a dim environment, illuminated by the fire. The girl has long hair and wears a light blue dress. The girl is standing on a ladder. The baby elephant is standing on a high platform. The baby elephant's face is dressed up like a clown, with a white face and red nose, and a hat with the words \"FIRE6\" on its head. The baby elephant has a light-colored cape around its neck and looks worried and nervous. The girl holds the ladder railing with her left hand and hands a white feather to the baby elephant with her right hand. 
The baby elephant successfully takes the feather with its nose.", "visual_elements": [ { "content": "The camera is aimed at a girl and a baby elephant, shooting from bottom to top.", "type": "camera", "weight": 3 }, { "content": "The background is a dim environment, illuminated by the fire.", "type": "scene", "weight": 2 }, { "content": "The girl has long hair and wears a light blue dress.", "type": "attribute", "weight": 2 }, { "content": "The girl is standing on a ladder.", "type": "attribute", "weight": 3 }, { "content": "The baby elephant is standing on a high platform.", "type": "attribute", "weight": 3 }, { "content": "The baby elephant's face is dressed up like a clown, with a white face and red nose, and a hat with the words \"FIRE6\" on its head.", "type": "attribute", "weight": 2 }, { "content": "The baby elephant has a light-colored cape around its neck and looks worried and nervous.", "type": "attribute", "weight": 2 }, { "content": "The girl holds the ladder railing with her left hand.", "type": "attribute", "weight": 2 }, { "content": "The girl hands a white feather to the baby elephant with her right hand.", "type": "action", "weight": 3 }, { "content": "The baby elephant successfully takes the feather with its nose.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to two ropes at a certain point on the ladder. The inner rope begins to burn and gradually breaks. The camera then switches back to the girl. The little girl is smiling and stretching her right hand forward, then suddenly lowers her head.", "visual_elements": [ { "content": "The camera switches to two ropes, and a close-up of a rope slightly farther from the camera begins to burn and gradually breaks.", "type": "camera", "weight": 3 }, { "content": "The camera then switches back to the girl.", "type": "camera", "weight": 3 }, { "content": "The girl smiles and stretches her right hand forward, then suddenly lowers her head.", "type": "action", "weight": 3 } ] }, { "event": "The camera zooms out to show the entire scene. The elephant is standing on the top of a tall tower, and the windows on the third floor of the tower are on fire. The girl falls off the ladder. The elephant looks at the girl falling, and sways his body and quickly backs away.", "visual_elements": [ { "content": "The camera zooms out to show the entire scene.", "type": "camera", "weight": 3 }, { "content": "The elephant is standing on the top of a tall tower, and the windows on the third floor of the tower are on fire.", "type": "scene", "weight": 2 }, { "content": "The girl falls off the ladder.", "type": "action", "weight": 3 }, { "content": "The elephant looks at the girl falling, and sways his body and quickly backs away.", "type": "action", "weight": 1 } ] }, { "event": "The camera switches to the audience seats under the tower. The audience in the background sits in their seats, looking at the situation on the top of the tower with nervous expressions. In the foreground of the camera, a clown in a yellow uniform stands next to the device under the tower. 
The clown quickly runs to the left side of the screen.", "visual_elements": [ { "content": "The camera switches to the audience seats under the tower.", "type": "camera", "weight": 3 }, { "content": "The audience in the background sits in their seats, looking at the situation on the top of the tower with nervous expressions.", "type": "scene", "weight": 3 }, { "content": "In the foreground of the camera, a clown in a yellow uniform stands next to the device under the tower.", "type": "attribute", "weight": 3 }, { "content": "The clown quickly runs to the left side of the screen.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0014", "video_path": "DREAM-1K/214.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Film", "duration": 9.88, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the picture is very dark, and a woman's face appears behind an unclosed door. Then, the camera quickly switches to a dim passage. There is a white light at the end of the passage, which is relatively bright. The entrance of one section of the passage is polygonal, with a silver frame, and some boxes are stacked on the left.\nThe camera is focused on the woman. She is a young woman with long black hair, wearing a white vest and holding an axe in her right hand. She is leaning against the half-open door of the passage and slowly moving to her left. She looks very alert. The lights on the screen gradually fade and become dim.\nThe camera switches to another passage. There is a polygonal silver-framed entrance between each section of the passage. The lights in the passage are constantly flashing.\nThe camera switches back to the woman, focusing on her face. The woman opens her eyes wide and looks as far to her left as possible, looking very nervous and scared.", "events": [ { "event": "At the beginning of the video, the picture is very dark, and a woman's face appears behind an unclosed door. Then, the camera quickly switches to a dim passage. There is a white light at the end of the passage, which is relatively bright. The entrance of one section of the passage is polygonal, with a silver frame, and some boxes are stacked on the left.", "visual_elements": [ { "content": "The picture is very dark.", "type": "scene", "weight": 3 }, { "content": "A woman's face appears behind an unclosed door.", "type": "camera", "weight": 3 }, { "content": "The camera quickly switches to a dim passage.", "type": "camera", "weight": 3 }, { "content": "There is a white light at the end of the passage, which is brighter.", "type": "scene", "weight": 2 }, { "content": "The entrance to one section of the passage is polygonal and has a silver frame.", "type": "scene", "weight": 1 }, { "content": "Some boxes are stacked on the left side of the entrance to one section of the passage.", "type": "scene", "weight": 2 } ] }, { "event": "The camera is focused on the woman. She is a young woman with long black hair, wearing a white vest and holding an axe in her right hand. She is leaning against the half-open door of the passage and slowly moving to her left. She looks very alert. 
The lights on the screen gradually fade and become dim.", "visual_elements": [ { "content": "The camera is aimed at the woman.", "type": "camera", "weight": 3 }, { "content": "The woman is a young woman with long black hair, wearing a white vest and holding an axe in her right hand.", "type": "attribute", "weight": 3 }, { "content": "The woman is close to the half-open door of the passage.", "type": "action", "weight": 3 }, { "content": "The woman slowly moves to her left.", "type": "action", "weight": 1 }, { "content": "The woman looks very alert.", "type": "attribute", "weight": 1 }, { "content": "The lights on the screen gradually disappear and become dim.", "type": "scene", "weight": 3 } ] }, { "event": "The camera switches to another passage. There is a polygonal silver-framed entrance between each section of the passage. The lights in the passage are constantly flashing.", "visual_elements": [ { "content": "The camera switches to another passage.", "type": "camera", "weight": 3 }, { "content": "There is an entrance with a polygonal silver frame between each section of the other passage.", "type": "attribute", "weight": 2 }, { "content": "The lights in the passage in the picture keep flashing.", "type": "scene", "weight": 3 } ] }, { "event": "The camera switches back to the woman, focusing on her face. The woman opens her eyes wide and looks as far to her left as possible, looking very nervous and scared.", "visual_elements": [ { "content": "The camera switches back to the woman and focuses on her face.", "type": "camera", "weight": 3 }, { "content": "The woman opens her eyes wide and looks as far to her left as possible.", "type": "action", "weight": 3 }, { "content": "The woman looks very nervous and scared.", "type": "attribute", "weight": 1 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0015", "video_path": "DREAM-1K/215.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 6.38, "resolution": { "width": 1904, "height": 1024 }, "caption": "At the beginning of the video, the camera is focused on a huge iron gate and remains motionless. The gate is surrounded by rocky cliffs, with a dark night sky and dead trees in the background. There is a red light behind the iron gate, and a thin mist is floating on the ground. As the mist descends, a car with two headlights on is driving towards the iron gate.\nNext, the camera switches to a dim street, where two male detainees in dark blue uniforms are escorting a woman, each holding one of her hands. The woman is obese, wearing green scale-like clothing, and has long blonde hair. In the background is another closed iron gate.\nThe camera starts to zoom in to the lower left of the screen. The woman breaks free from the detainee on her left, turns to her right, and kicks the detainee on her right. The detainee on her right is kicked to the ground. The detainee on the left of the woman waves his hands to control the woman, and then is lifted up by the woman's hands and thrown into the water. The light in the background is dim, and there are several small orange skull-shaped lights on the buildings on both sides of the screen.\nFinally, the camera switches to the woman, shaking slightly. The woman stands alone and turns to her left. In the background, two men open the iron gate and run to the left of the screen after seeing the woman.", "events": [ { "event": "At the beginning of the video, the camera is focused on a huge iron gate and remains motionless. 
The gate is surrounded by rocky cliffs, with a dark night sky and dead trees in the background. There is a red light behind the iron gate, and a thin mist is floating on the ground. As the mist descends, a car with two headlights on is driving towards the iron gate.", "visual_elements": [ { "content": "The video is a 3D animation. The camera is aimed at a huge iron gate.", "type": "camera", "weight": 3 }, { "content": "The background is the dark night sky and dead trees.", "type": "scene", "weight": 2 }, { "content": "There is a red light behind the iron gate, and the ground is filled with mist.", "type": "scene", "weight": 2 }, { "content": "The mist is falling.", "type": "scene", "weight": 2 }, { "content": "There is a car with two headlights on.", "type": "scene", "weight": 1 }, { "content": "The car is driving towards the iron gate.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to a dim street, where two male detainees in dark blue uniforms are escorting a woman, each holding one of her hands. The woman is obese, wearing green scale-like clothing, and has long blonde hair. In the background is another closed iron gate.", "visual_elements": [ { "content": "The camera switches to a dim street.", "type": "camera", "weight": 3 }, { "content": "Two men in dark blue uniforms are escorting a woman.", "type": "action", "weight": 3 }, { "content": "Two detainees each hold one of the woman's hands.", "type": "attribute", "weight": 2 }, { "content": "The woman is obese, wearing green scaly clothing and has long blonde hair.", "type": "attribute", "weight": 2 }, { "content": "In the background is another tightly closed iron gate.", "type": "scene", "weight": 2 } ] }, { "event": "The camera starts to zoom in to the lower left of the screen. The woman breaks free from the detainee on her left, turns to her right, and kicks the detainee on her right. The detainee on her right is kicked to the ground. The detainee on the left of the woman waves his hands to control the woman, and then is lifted up by the woman's hands and thrown into the water. The light in the background is dim, and there are several small orange skull-shaped lights on the buildings on both sides of the screen.", "visual_elements": [ { "content": "The camera begins to zoom in to the lower left of the screen.", "type": "camera", "weight": 3 }, { "content": "The woman breaks free from the detainee on her left.", "type": "action", "weight": 3 }, { "content": "The woman turns to her right and kicks the detainee on her right away.", "type": "action", "weight": 3 }, { "content": "The detainee on her right is kicked to the ground.", "type": "action", "weight": 2 }, { "content": "The detainee on the left of the woman waves his hands to control the woman.", "type": "action", "weight": 2 }, { "content": "The detainee on the left of the woman is lifted up by the woman's hands and thrown into the water.", "type": "action", "weight": 3 }, { "content": "The light in the background is dim, and there are several small orange skull-shaped lights on the buildings on both sides of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera switches to the woman, shaking slightly. The woman stands alone and turns to her left. 
In the background, two men open the iron gate and run to the left of the screen after seeing the woman.", "visual_elements": [ { "content": "The camera switches to the woman, shaking slightly.", "type": "camera", "weight": 3 }, { "content": "The woman stands alone and turns to her left to leave.", "type": "action", "weight": 3 }, { "content": "Two men in the background open the iron gate.", "type": "scene", "weight": 2 }, { "content": "The two men run to the left of the screen after seeing the woman.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 22 }, { "index": "TUNA_0016", "video_path": "DREAM-1K/216.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Film", "duration": 4.25, "resolution": { "width": 1904, "height": 1024 }, "caption": "At the beginning of the video, the camera is aimed at the interior of a large tent building with bright orange-yellow lights. On the left side of the screen is an open white door, and the scenery outside the door is dim. A man wearing a light-colored top and dark pants is located in the center of the screen and starts to run quickly to the right side of the screen in the tent.\nThe camera pans to the right following the man. A string of sparks suddenly explodes above the tent, and the entire tent goes dark. A hazy yellow halo appears on the wall inside the tent, with only the man's outline. The man leans forward and runs faster to the right of the screen.\nThe man runs through a door and into another room, then stands with his head down and stops running. The camera passes through the door and focuses on the man again. There is still a yellow halo in the background, and there is an unlit light above the tent.", "events": [ { "event": "At the beginning of the video, the camera is aimed at the interior of a large tent building with bright orange-yellow lights. On the left side of the screen is an open white door, and the scenery outside the door is dim. A man wearing a light-colored top and dark pants is located in the center of the screen and starts to run quickly to the right side of the screen in the tent.", "visual_elements": [ { "content": "The camera is aimed at the interior of a large tent building.", "type": "camera", "weight": 3 }, { "content": "There is a bright orange-yellow light inside.", "type": "scene", "weight": 3 }, { "content": "There is an open white door on the left side of the screen, and the scenery outside the door is dim.", "type": "scene", "weight": 2 }, { "content": "A man wearing a light-colored top and dark pants is in the center of the screen.", "type": "attribute", "weight": 3 }, { "content": "The man starts to run quickly to the right side of the screen in the tent.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans to the right following the man. A string of sparks suddenly explodes above the tent, and the entire tent goes dark. A hazy yellow halo appears on the wall inside the tent, with only the man's outline. 
The man leans forward and runs faster to the right of the screen.", "visual_elements": [ { "content": "The camera pans to the right with the man.", "type": "camera", "weight": 3 }, { "content": "A series of sparks explode above the tent.", "type": "scene", "weight": 1 }, { "content": "There is a hazy yellow halo on the wall inside the tent.", "type": "scene", "weight": 2 }, { "content": "There is only the outline of the man in the picture.", "type": "attribute", "weight": 2 }, { "content": "The man leans forward and runs faster to the right side of the screen.", "type": "attribute", "weight": 3 } ] }, { "event": "The man runs through a door and into another room, then stands with his head down and stops running. The camera passes through the door and focuses on the man again. There is still a yellow halo in the background, and there is an unlit light above the tent.", "visual_elements": [ { "content": "The man runs through a door and enters another room.", "type": "action", "weight": 3 }, { "content": "The camera passes through the door and focuses on the man again.", "type": "camera", "weight": 3 }, { "content": "The man stands with his head down and stops running.", "type": "action", "weight": 2 }, { "content": "There is an unlit light above the tent.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0017", "video_path": "DREAM-1K/218.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 8.12, "resolution": { "width": 1840, "height": 1024 }, "caption": "At the beginning of the video, the camera focuses on a young female character wearing armor, holding a shield in one hand, and fighting fiercely with an enemy flying in front of her. The enemy on the right side of the screen is a young man wearing black armor, a horned helmet, and holding a burning battle axe. In the background is a tall stone wall, and on the left side of the screen is another enemy wearing a black combat uniform and a pair of black wings, flying towards the woman with a gun.\nNext, the camera moves downward and pulls back. The enemy swings his axe, and the woman and her shield fall to the ground to her right. Then, the camera switches to a bird's-eye view of the woman falling to the ground. She holds the sword in her right hand and the shield in her left hand. The background is a metal ground covered with grids. The camera quickly moves to the upper right. The enemy holds the woman's shield with his hand and throws her and the shield to the high wall. Thick ash gushes forward from the wall.\nThe camera switches to a top-down perspective again. The enemy leaps up from the platform below, holding the burning battle axe high in his right hand. The background is a cylindrical construction site with a platform in the middle.\nThe camera switches to the woman. She leans against the stone wall and quickly turns her head to the right. The enemy chops the axe against the stone wall. The camera finally moves downward. The woman falls down and quickly grabs the wall with her left hand. Then, the enemy with the axe turns to his left and looks down at the woman. The woman holds the sword in her right hand and chops the enemy with the axe in the leg.", "events": [ { "event": "At the beginning of the video, the camera focuses on a young female character wearing armor, holding a shield in one hand, and fighting fiercely with an enemy flying in front of her. 
The enemy on the right side of the screen is a young man wearing black armor, a horned helmet, and holding a burning battle axe. In the background is a tall stone wall, and on the left side of the screen is another enemy wearing a black combat uniform and a pair of black wings, flying towards the woman with a gun.", "visual_elements": [ { "content": "The camera focuses on a young female character in armor.", "type": "camera", "weight": 3 }, { "content": "The woman is holding a shield in one hand and is fighting fiercely with the enemy flying in front of her.", "type": "action", "weight": 3 }, { "content": "The enemy on the right side of the screen is a young man wearing black armor and a horned helmet.", "type": "attribute", "weight": 2 }, { "content": "The enemy on the right side of the screen is holding a burning battle axe.", "type": "action", "weight": 3 }, { "content": "The background is a tall stone wall.", "type": "scene", "weight": 2 }, { "content": "There is another enemy on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "The enemy on the left side of the screen is wearing a black combat uniform and has a pair of black wings.", "type": "attribute", "weight": 1 }, { "content": "The enemy on the left side of the screen is flying with a gun towards the woman.", "type": "action", "weight": 1 } ] }, { "event": "Next, the camera moves downward and pulls back. The enemy swings his axe, and the woman and her shield fall to the ground to her right. Then, the camera switches to a bird's-eye view of the woman falling to the ground. She holds the sword in her right hand and the shield in her left hand. The background is a metal ground covered with grids. The camera quickly moves to the upper right. The enemy holds the woman's shield with his hand and throws her and the shield to the high wall. Thick ash gushes forward from the wall.", "visual_elements": [ { "content": "The camera moves downward and pulls back.", "type": "camera", "weight": 3 }, { "content": "The enemy on the right side of the screen swings an axe.", "type": "action", "weight": 3 }, { "content": "The woman then falls to the ground to her right with her shield.", "type": "action", "weight": 3 }, { "content": "The camera switches to a bird's-eye view of the woman falling to the ground.", "type": "camera", "weight": 3 }, { "content": "The woman holds a sword in her right hand and a shield in her left hand.", "type": "attribute", "weight": 2 }, { "content": "The background is a metal ground covered with grids.", "type": "scene", "weight": 2 }, { "content": "The camera moves quickly to the upper right.", "type": "camera", "weight": 3 }, { "content": "The enemy on the right side of the screen holds the woman's shield with his hand and throws the woman and the shield to the high wall.", "type": "action", "weight": 3 }, { "content": "Thick ash gushes forward from the wall.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to a top-down perspective again. The enemy leaps up from the platform below, holding the burning battle axe high in his right hand. 
The background is a cylindrical construction site with a platform in the middle.", "visual_elements": [ { "content": "The camera switches to a bird's-eye view again.", "type": "camera", "weight": 3 }, { "content": "The enemy holding the axe jumps up from the platform below, holding the burning battle axe high in his hand.", "type": "action", "weight": 3 }, { "content": "The background is a cylindrical building site with a platform in the middle.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to the woman. She leans against the stone wall and quickly turns her head to the right. The enemy chops the axe against the stone wall. The camera finally moves downward. The woman falls down and quickly grabs the wall with her left hand. Then, the enemy with the axe turns to his left and looks down at the woman. The woman holds the sword in her right hand and chops the enemy with the axe in the leg.", "visual_elements": [ { "content": "The camera switches to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman leans against the stone wall.", "type": "attribute", "weight": 2 }, { "content": "The woman quickly turns her head to the right.", "type": "action", "weight": 3 }, { "content": "The enemy chops the axe on the stone wall.", "type": "action", "weight": 3 }, { "content": "The camera gradually moves downward.", "type": "camera", "weight": 3 }, { "content": "The woman falls down and quickly grabs the wall with her left hand.", "type": "action", "weight": 2 }, { "content": "Then, the enemy with the axe turns to the left and looks down at the woman.", "type": "action", "weight": 2 }, { "content": "The woman holds the sword in her right hand and chops the enemy with the axe on the leg.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 28 }, { "index": "TUNA_0018", "video_path": "DREAM-1K/219.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 5.38, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a dim room. A hand appears in the picture, holding a glass jar full of dust in the left hand and a shovel in the right hand. In the background, there are several similar glass jars on the left side of the picture.\nThe camera then switches to a closer angle, showing the man using a shovel to pour some black powdery substance into a glass jar.\nThe camera switches to a bird's-eye view of a man. There is a white light above the man, and only the outline of his face is visible. The table is cluttered with many glass jars, a box, and several rolls of wide tape.\nThe camera then switches to focus more closely on the man's hands. The man continues to shovel the black powder into the glass jar, and then he retracts the shovel.\nThe camera switches and moves to the left following the man's movements. The man places the can on a wooden board on the table with his left hand. The man holds a pair of scissors in his right hand, and there is a black strip under his right hand. There is a roll of tape on the left side of the man's hand.", "events": [ { "event": "At the beginning of the video, the camera focuses on a dim room. A hand appears in the picture, holding a glass jar full of dust in the left hand and a shovel in the right hand. 
In the background, there are several similar glass jars on the left side of the picture.", "visual_elements": [ { "content": "The camera focuses on a dark room full of glass jars.", "type": "camera", "weight": 3 }, { "content": "A hand appears in the picture.", "type": "attribute", "weight": 3 }, { "content": "The left hand holds a dusty glass jar, and the right hand holds a small shovel.", "type": "attribute", "weight": 2 }, { "content": "In the background, there are several glass jars similar to the one in the hand on the left side of the picture.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then switches to a closer angle, showing the man using a shovel to pour some black powdery substance into a glass jar.", "visual_elements": [ { "content": "The camera switches to a closer angle.", "type": "camera", "weight": 3 }, { "content": "The man is pouring some black powdery substance into the glass jar with a small shovel.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to a bird's-eye view of a man. There is a white light above the man, and only the outline of his face is visible. The table is cluttered with many glass jars, a box, and several rolls of wide tape.", "visual_elements": [ { "content": "The camera switches to a bird's-eye view of a man.", "type": "camera", "weight": 3 }, { "content": "There is a white light above the man.", "type": "scene", "weight": 2 }, { "content": "The man's face is only an outline.", "type": "attribute", "weight": 3 }, { "content": "The table is messy, with many glass jars, a box and several rolls of wide tape.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then switches to focus more closely on the man's hands. The man continues to shovel the black powder into the glass jar, and then he retracts the shovel.", "visual_elements": [ { "content": "The camera switches to a closer focus on the man's hands.", "type": "camera", "weight": 3 }, { "content": "The man continues to shovel black powder into the glass jar, then retracts the shovel.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches and moves to the left following the man's movements. The man places the can on a wooden board on the table with his left hand. The man holds a pair of scissors in his right hand, and there is a black strip under his right hand. There is a roll of tape on the left side of the man's hand.", "visual_elements": [ { "content": "The camera switches and moves to the left following the man's movements.", "type": "camera", "weight": 3 }, { "content": "The man's left hand places the jar on a wooden board on the table.", "type": "action", "weight": 3 }, { "content": "The man holds a pair of scissors in his right hand.", "type": "action", "weight": 2 }, { "content": "There is a black strip-like object under the man's right hand.", "type": "scene", "weight": 2 }, { "content": "There is a roll of tape on the man's left hand.", "type": "scene", "weight": 1 } ] } ], "n_events": 5, "n_elements": 17 }, { "index": "TUNA_0019", "video_path": "DREAM-1K/222.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 6.38, "resolution": { "width": 1280, "height": 640 }, "caption": "The camera is focused on the interior of a gorgeously decorated Christmas store. A young man stands on the right side of the screen. He is wearing a khaki windbreaker, a gray sweater and a light-colored hat. The man is looking back. 
Behind the man is a blonde woman wearing a green Christmas outfit, a black belt and a green hat. The woman walks forward from the right back of the man. There is an arch behind the two of them, and above the arch hangs a neon sign. The sign reads \"WONDERFUL\". Behind the arch is a golden curtain. The store is decorated with Christmas trees, white LED lights, dolls and other Christmas decorations.\nThe camera pulls back. The woman talks to the man and shakes her left hand. The man also turns his head and slowly moves forward.\nThe camera switches to the store entrance. The woman pushes open a blue-framed glass door with both hands. There is an \"OPEN\" sign on the door. On the left side of the screen is a Christmas tree decorated with lights. The woman leans against a door on the left side of the screen. A black man walks out of the door sideways. Then, the man in the hoodie walks out of the door sideways.\nThe camera switches to behind the woman, focusing on the man in the sweatshirt. The man in the sweatshirt turns around at the door of the store and speaks to the woman while raising his right hand to make gestures. On the left side of the screen is a shelf behind the door, on which are placed some small dolls such as elk dolls. In the distance outside the store door is a light-colored building with many passers-by in front.", "events": [ { "event": "The camera is focused on the interior of a gorgeously decorated Christmas store. A young man stands on the right side of the screen. He is wearing a khaki windbreaker, a gray sweater and a light-colored hat. The man is looking back. Behind the man is a blonde woman wearing a green Christmas outfit, a black belt and a green hat. The woman walks forward from the right back of the man. There is an arch behind the two of them, and above the arch hangs a neon sign. The sign reads \"WONDERFUL\". Behind the arch is a golden curtain. The store is decorated with Christmas trees, white LED lights, dolls and other Christmas decorations.", "visual_elements": [ { "content": "The camera is focused on the interior of a gorgeously decorated Christmas store.", "type": "camera", "weight": 3 }, { "content": "A young man is standing on the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The man is wearing a khaki windbreaker, a gray sweater and a light-colored hat.", "type": "attribute", "weight": 3 }, { "content": "The man is looking back.", "type": "action", "weight": 3 }, { "content": "Behind the man is a blonde woman wearing a green Christmas outfit, a black belt and a green hat.", "type": "attribute", "weight": 2 }, { "content": "The woman is walking towards the man from the right.", "type": "action", "weight": 2 }, { "content": "There is an arch behind the two of them, with a neon sign hanging above the arch.", "type": "scene", "weight": 2 }, { "content": "The neon sign reads \"WONDERFUL\".", "type": "scene", "weight": 1 }, { "content": "Behind the arch is a golden curtain.", "type": "scene", "weight": 2 }, { "content": "The store is decorated with Christmas trees, white LED lights, dolls and other Christmas decorations.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pulls back. The woman talks to the man and shakes her left hand. 
The man also turns his head and slowly moves forward.", "visual_elements": [ { "content": "The camera pulls back.", "type": "camera", "weight": 3 }, { "content": "The woman talks to the man and shakes her left hand.", "type": "action", "weight": 3 }, { "content": "The man also turns his head and slowly moves forward.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to the store entrance. The woman pushes open a blue-framed glass door with both hands. There is an \"OPEN\" sign on the door. On the left side of the screen is a Christmas tree decorated with lights. The woman leans against a door on the left side of the screen. A black man walks out of the door sideways. Then, the man in the hoodie walks out of the door sideways.", "visual_elements": [ { "content": "The camera switches to the store entrance.", "type": "camera", "weight": 3 }, { "content": "The woman pushes open a blue-framed door with both hands.", "type": "action", "weight": 3 }, { "content": "There is an \"OPEN\" sign on a door on the right side of the screen.", "type": "scene", "weight": 1 }, { "content": "There is a Christmas tree decorated with lights on the left side of the screen.", "type": "scene", "weight": 1 }, { "content": "The woman leans against a door on the left side of the screen.", "type": "action", "weight": 3 }, { "content": "A black man walks out of the door sideways.", "type": "action", "weight": 2 }, { "content": "Then, the man in the sweatshirt walks out of the door sideways.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to behind the woman, focusing on the man in the sweatshirt. The man in the sweatshirt turns around at the door of the store and speaks to the woman while raising his right hand to make gestures. On the left side of the screen is a shelf behind the door, on which are placed some small dolls such as elk dolls. In the distance outside the store door is a light-colored building with many passers-by in front.", "visual_elements": [ { "content": "The camera switches to the back of the woman, focusing on the man in the sweatshirt.", "type": "camera", "weight": 3 }, { "content": "The man in the sweatshirt turns around at the store entrance and speaks to the woman while raising his right hand to make gestures.", "type": "action", "weight": 3 }, { "content": "On the left side of the screen is a shelf with a night light wrapped around it behind the door.", "type": "scene", "weight": 2 }, { "content": "There are some small dolls on the shelf.", "type": "scene", "weight": 1 }, { "content": "In the distance outside the store entrance is a light-colored building, and there are many passers-by in front of the building.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0020", "video_path": "DREAM-1K/225.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 4.5, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera is shooting upward from a low angle. The weather is clear and there are some white clouds in the sky. A huge blue robot appears in the picture. The camera slowly pans to the right, and the robot waves its arms and gradually turns to the left side of the picture. There are some damaged buildings in the background.\nThe camera switches to a further perspective, with the robot walking on a destroyed street. 
The buildings on both sides of the street are severely damaged, with rubble and debris scattered on the ground, and some low trees on both sides of the street. The sun shines down from the upper right corner of the picture.\nIn front of the robot, a spherical object rolls forward quickly from the street on the right side of the screen. The sphere drops some fragments as it rolls. The robot turns towards the camera and follows the sphere with his eyes.\nThe sphere turns right in front of the camera, and the camera rotates left and finally behind it. In the background, on the right side of the frame, there is a tall, damaged building, with a tree and a wrecked white car in front. A building collapsed in front of the street, blocking the sphere's forward movement. In the distance, there is an upright gray-white building under a blue sky and white clouds.", "events": [ { "event": "At the beginning of the video, the camera is shooting upward from a low angle. The weather is clear and there are some white clouds in the sky. A huge blue robot appears in the picture. The camera slowly pans to the right, and the robot waves its arms and gradually turns to the left side of the picture. There are some damaged buildings in the background.", "visual_elements": [ { "content": "The camera is shooting upward from a low angle.", "type": "camera", "weight": 3 }, { "content": "The weather is clear, with some white clouds in the sky.", "type": "scene", "weight": 2 }, { "content": "A huge blue robot appears in the picture.", "type": "camera", "weight": 3 }, { "content": "The camera slowly pans to the right.", "type": "camera", "weight": 3 }, { "content": "The robot waves its arms and gradually turns to the left side of the picture.", "type": "action", "weight": 3 }, { "content": "There are some damaged buildings in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to a further perspective, with the robot walking on a destroyed street. The buildings on both sides of the street are severely damaged, with rubble and debris scattered on the ground, and some low trees on both sides of the street. The sun shines down from the upper right corner of the picture.", "visual_elements": [ { "content": "The camera switches to a farther perspective.", "type": "camera", "weight": 3 }, { "content": "The robot stands on a destroyed street.", "type": "attribute", "weight": 2 }, { "content": "Buildings on both sides of the street have collapsed.", "type": "scene", "weight": 2 }, { "content": "Rubble and debris are scattered on the ground.", "type": "scene", "weight": 1 }, { "content": "There are some low trees on both sides of the street.", "type": "scene", "weight": 1 }, { "content": "The sun shines down from the upper right corner of the picture.", "type": "scene", "weight": 2 } ] }, { "event": "In front of the robot, a spherical object rolls forward quickly from the street on the right side of the screen. The sphere drops some fragments as it rolls. 
The robot turns towards the camera and follows the sphere with his eyes.", "visual_elements": [ { "content": "There is a spherical object rolling forward quickly from the street on the right side of the picture.", "type": "action", "weight": 3 }, { "content": "The sphere drops some debris while rolling.", "type": "action", "weight": 1 }, { "content": "The robot turns to the camera and follows the sphere with his eyes.", "type": "action", "weight": 3 } ] }, { "event": "The sphere turns right in front of the camera, and the camera rotates left and finally behind it. In the background, on the right side of the frame, there is a tall, damaged building, with a tree and a wrecked white car in front. A building collapsed in front of the street, blocking the sphere's forward movement. In the distance, there is an upright gray-white building under a blue sky and white clouds.", "visual_elements": [ { "content": "The camera rotates to the left following the movement of the sphere and finally stands behind the sphere.", "type": "camera", "weight": 3 }, { "content": "The picture is surrounded by ruins and collapsed buildings.", "type": "scene", "weight": 3 }, { "content": "There is a tree and a destroyed white car on the right side of the picture.", "type": "scene", "weight": 2 }, { "content": "There is a row of red and white triangular roadblocks on the ground on the left side of the picture.", "type": "scene", "weight": 1 }, { "content": "There is an upright gray-white building in the distance.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_0021", "video_path": "DREAM-1K/227.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 8.25, "resolution": { "width": 1904, "height": 1024 }, "caption": "At the beginning of the video, the camera is focused on a red convertible that is driving with its back to the camera. It is night time, and the surroundings are dark and deserted. The tail lights are on. Then, the car turns left, and the wheels kick up some dust.\nThen, the camera switches to the front of the car, facing the two people inside. On the left side of the screen is a young man wearing a white inner garment and a dark jacket, wearing a necklace, looking back at the rear of the car. On the right side of the screen is a young woman with long hair, holding the steering wheel, looking panicked. The lights in the car are dim, and only the outlines of the two people can be seen.\nThe camera then switches to a lower angle outside the car again, following the car moving forward quickly. The wheels turn quickly and the car body bumps on the road. Then, the camera switches to the front of the car. A dark blue car is chasing the convertible from the left rear of the convertible.\nThe camera then zooms in to focus on a dark blue police car driving fast, with its headlights on and blue and yellow lights flashing on its roof.\nFinally, the camera shoots down the ground at a high angle, showing a convertible and three police cars driving on a circular road, with police cars in front and behind the convertible. There are orange lights on the edge and center of the circular road.", "events": [ { "event": "At the beginning of the video, the camera is focused on a red convertible that is driving with its back to the camera. It is night time, and the surroundings are dark and deserted. The tail lights are on. 
Then, the car turns left, and the wheels kick up some dust.", "visual_elements": [ { "content": "The camera is focused on a red convertible that is driving with its back to the camera.", "type": "camera", "weight": 3 }, { "content": "The background is at night and it is pitch black all around.", "type": "scene", "weight": 3 }, { "content": "The taillights are on.", "type": "attribute", "weight": 2 }, { "content": "The car is moving quickly left and right.", "type": "action", "weight": 3 }, { "content": "The wheels are kicking up some dust.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera switches to the front of the car, facing the two people inside. On the left side of the screen is a young man wearing a white inner garment and a dark jacket, wearing a necklace, looking back at the rear of the car. On the right side of the screen is a young woman with long hair, holding the steering wheel, looking panicked. The lights in the car are dim, and only the outlines of the two people can be seen.", "visual_elements": [ { "content": "The camera switches to the front of the car, facing the two people in the car.", "type": "camera", "weight": 3 }, { "content": "A young man is on the left side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The man is wearing a dark coat, a white inner layer, and a necklace.", "type": "attribute", "weight": 2 }, { "content": "The man looks back at the back of the car.", "type": "action", "weight": 2 }, { "content": "A young woman with long hair is on the right side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The woman is holding the steering wheel with a panicked look.", "type": "attribute", "weight": 2 }, { "content": "The lights in the car are dim, with only the outlines of the two people.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera then switches to a lower angle outside the car again, following the car moving forward quickly. The wheels turn quickly and the car body bumps on the road. Then, the camera switches to the front of the car. A dark blue car is chasing the convertible from the left rear of the convertible.", "visual_elements": [ { "content": "The camera switches to a lower angle outside the car again, following the car's rapid forward movement.", "type": "camera", "weight": 3 }, { "content": "The wheels turn rapidly, and the car body bumps on the road.", "type": "action", "weight": 3 }, { "content": "The camera switches to the front of the car.", "type": "camera", "weight": 3 }, { "content": "A dark blue police car is chasing the convertible in the rear right of the screen.", "type": "action", "weight": 3 } ] }, { "event": "The camera then zooms in to focus on a dark blue police car driving fast, with its headlights on and blue and yellow lights flashing on its roof.", "visual_elements": [ { "content": "Then, the camera moves forward and focuses on the dark blue police car that is driving fast.", "type": "camera", "weight": 3 }, { "content": "The headlights of the police car are on, and the roof also flashes blue and yellow lights.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the camera shoots down the ground at a high angle, showing a convertible and three police cars driving on a circular road, with police cars in front and behind the convertible. 
There are orange lights on the edge and center of the circular road.", "visual_elements": [ { "content": "The camera finally shoots down the ground at a high angle, showing the convertible and three police cars driving on a circular road.", "type": "camera", "weight": 3 }, { "content": "There are police cars in front and behind the convertible.", "type": "attribute", "weight": 2 }, { "content": "There are orange lights at the edge and center of the circular road.", "type": "scene", "weight": 2 } ] } ], "n_events": 5, "n_elements": 21 }, { "index": "TUNA_0022", "video_path": "DREAM-1K/228.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 3.38, "resolution": { "width": 1840, "height": 1024 }, "caption": "At the beginning of the video, the camera is focused on a long-haired young woman wearing a pink top, who is standing in front of a table in the kitchen. There is a stainless steel bowl on the table with food in it. The woman extends her right hand and dips a finger into the food in the bowl and takes it back. Behind her, there is a shelf with some kitchen utensils on it.\nThe camera then switches to four young people standing directly in front of the woman. There are two other women on the right side of the screen, standing against the edge of the table. The woman on the far right is wearing a mainly white T-shirt and green skirt, and a necklace. She extends her right hand forward, with a bracelet on it. She grins, revealing her white teeth. The woman on the left has long pink-purple hair, a bracelet wrapped around her right hand, her right hand against her chin, her mouth pursed, and her left eyebrow slightly wrinkled. On the left side of the screen, far away from the table, there is a man sitting at a table with gray hair, wearing a mainly white long-sleeved T-shirt and long pants. He is holding a brown and yellow puppy sitting in a bowl in his right hand, and his left hand is open with five fingers pointing forward. In front of the right of the gray-haired man is a long-haired man wearing dark clothes. He looks at the woman in the pink top in front of him and points his right hand forward. In the background, some kitchen utensils such as spoons and sieves are hung above the two men, and there are two white lights on the wall above. On the table is a bowl of eggs, a small-caliber glass container, a cutting board covered with a white cloth, and three large-caliber containers filled with powdered food.\nThe camera switches back to the woman in the pink top. She tastes the food on her index finger and looks to her left, savoring the food carefully.\nThe camera switches to two men, filming them closely. The grey-haired man looks surprised, with his eyes wide open and his mouth wide open. The puppy sitting in the basin looks up and forward. The man on the far left of the screen also looks at the left side of the screen with his eyes wide open.", "events": [ { "event": "At the beginning of the video, the camera is focused on a long-haired young woman wearing a pink top, who is standing in front of a table in the kitchen. There is a stainless steel bowl on the table with food in it. The woman extends her right hand and dips a finger into the food in the bowl and takes it back. 
Behind her, there is a shelf with some kitchen utensils on it.", "visual_elements": [ { "content": "The camera is focused on a long-haired young woman wearing a pink top.", "type": "camera", "weight": 3 }, { "content": "The woman in the pink top is standing in front of a table in the kitchen.", "type": "attribute", "weight": 3 }, { "content": "There is a stainless steel bowl on the table with food in it.", "type": "scene", "weight": 2 }, { "content": "The woman extends her right hand and dips a finger into the food in the bowl before pulling it back.", "type": "action", "weight": 3 }, { "content": "There is a shelf behind the woman in the pink top.", "type": "scene", "weight": 2 }, { "content": "There are some kitchen utensils on the shelf.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then switches to four young people standing directly in front of the woman. There are two other women on the right side of the screen, standing against the edge of the table. The woman on the far right is wearing a mainly white T-shirt and green skirt, and a necklace. She extends her right hand forward, with a bracelet on it. She grins, revealing her white teeth. The woman on the left has long pink-purple hair, a bracelet wrapped around her right hand, her right hand against her chin, her mouth pursed, and her left eyebrow slightly wrinkled. On the left side of the screen, far away from the table, there is a man sitting at a table with gray hair, wearing a mainly white long-sleeved T-shirt and long pants. He is holding a brown and yellow puppy sitting in a bowl in his right hand, and his left hand is open with five fingers pointing forward. In front of the right of the gray-haired man is a long-haired man wearing dark clothes. He looks at the woman in the pink top in front of him and points his right hand forward. In the background, some kitchen utensils such as spoons and sieves are hung above the two men, and there are two white lights on the wall above. 
On the table is a bowl of eggs, a small-caliber glass container, a cutting board covered with a white cloth, and three large-caliber containers filled with powdered food.", "visual_elements": [ { "content": "The camera then switches to the four young people standing in front of the woman.", "type": "camera", "weight": 3 }, { "content": "There are two other women on the right side of the screen, standing against the edge of the table.", "type": "attribute", "weight": 2 }, { "content": "There is a bowl of eggs, a small-caliber glass container, a chopping board covered with a white cloth, and three large-caliber containers of powdered food on the table.", "type": "scene", "weight": 2 }, { "content": "The woman on the far right of the screen is wearing a white T-shirt with dark blue sleeves and a green skirt, and a necklace.", "type": "attribute", "weight": 2 }, { "content": "The woman on the far right of the screen extends her right hand forward.", "type": "action", "weight": 3 }, { "content": "The woman on the far right of the screen has a bracelet on her right hand.", "type": "attribute", "weight": 1 }, { "content": "The woman on the far right of the screen grins, showing her white teeth.", "type": "action", "weight": 1 }, { "content": "The woman on the left of the screen has long pink and purple hair, and her right hand is wrapped with a bracelet.", "type": "attribute", "weight": 2 }, { "content": "The woman on the left side of the picture has her right hand under her chin, her lips pursed, and her left eyebrow slightly wrinkled.", "type": "action", "weight": 1 }, { "content": "A man in the center of the picture is sitting on a bench on the left side of the picture.", "type": "action", "weight": 3 }, { "content": "The man in the center of the picture has gray hair and wears a white T-shirt with black patterns and trousers.", "type": "attribute", "weight": 2 }, { "content": "The man in the center of the picture is holding a brown and yellow puppy sitting in a basin with his right hand.", "type": "attribute", "weight": 2 }, { "content": "The man in the center of the picture has five fingers of his left hand open, pointing forward.", "type": "action", "weight": 1 }, { "content": "In front of the gray-haired man to the right is a long-haired man wearing dark clothes.", "type": "attribute", "weight": 2 }, { "content": "The long-haired man is looking at the woman in the pink top in front of him, and his right hand is pointing forward.", "type": "action", "weight": 1 }, { "content": "A row of kitchen utensils is hanging above the two men.", "type": "scene", "weight": 2 }, { "content": "There are two white lights on the wall above the two men.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches back to the woman in the pink top. She tastes the food on her index finger and looks to her left, savoring the food carefully.", "visual_elements": [ { "content": "The camera switches back to the woman in the pink shirt.", "type": "camera", "weight": 3 }, { "content": "The woman in the pink shirt tastes the food on her index finger.", "type": "action", "weight": 3 }, { "content": "The woman in the pink shirt looks to her left.", "type": "action", "weight": 1 } ] }, { "event": "The camera switches to two men, filming them closely. The grey-haired man looks surprised, with his eyes wide open and his mouth wide open. The puppy sitting in the basin looks up and forward. 
The man on the far left of the screen also looks at the left side of the screen with his eyes wide open.", "visual_elements": [ { "content": "The camera switches to the two men, taking a close-up shot of the two men.", "type": "camera", "weight": 3 }, { "content": "The gray-haired man looks surprised, with his eyes wide open and his mouth wide open.", "type": "action", "weight": 2 }, { "content": "The man on the far left of the screen also looks to the left of the screen with his eyes wide open.", "type": "action", "weight": 1 } ] } ], "n_events": 4, "n_elements": 29 }, { "index": "TUNA_0023", "video_path": "DREAM-1K/229.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 6.12, "resolution": { "width": 1904, "height": 1024 }, "caption": "At the beginning of the video, the camera is focused on a gray baby elephant standing in a fence with blue vertical bars and yellow hay on the ground. The baby elephant's head is facing the camera, with its ears hanging on both sides and some hay on top of its body. The baby elephant flicks its trunk to the left of the screen and lowers its head at the same time.\nThe camera switches to the right side of the baby elephant. The baby elephant's front legs are bent and close to the ground, with its ears and trunk pressed against the ground. The baby elephant uses its trunk to throw a black, flake-like object in front of it that is not hay to the left side of the baby elephant.\nAt the end of the video, the camera switches to two children outside the fence. On the left side of the screen is a little boy. He is wearing a striped shirt and overalls, with his right hand on the fence. He leans forward, looking at the baby elephant and talking to it. On the right side of the screen is a little girl. She is wearing a rose-red inner layer and a gray outer coat, with a key-shaped necklace around her neck, and her right hand is resting on her chest. The little girl first looks to her right at the little boy, then looks at the baby elephant.", "events": [ { "event": "At the beginning of the video, the camera is focused on a gray baby elephant standing in a fence with blue vertical bars and yellow hay on the ground. The baby elephant's head is facing the camera, with its ears hanging on both sides and some hay on top of its body. The baby elephant flicks its trunk to the left of the screen and lowers its head at the same time.", "visual_elements": [ { "content": "The camera is pointed at a gray baby elephant.", "type": "camera", "weight": 3 }, { "content": "The baby elephant is standing in a fence.", "type": "attribute", "weight": 3 }, { "content": "The vertical bars of the fence are blue.", "type": "scene", "weight": 1 }, { "content": "The ground is covered with yellow hay.", "type": "scene", "weight": 2 }, { "content": "The baby elephant's head is facing the camera, with its ears hanging on both sides and some hay on its \nbody.", "type": "attribute", "weight": 2 }, { "content": "The baby elephant flicks its trunk to the left side of the screen and lowers its head.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the right side of the baby elephant. The baby elephant's front legs are bent and close to the ground, with its ears and trunk pressed against the ground. 
The baby elephant uses its trunk to throw a black, flake-like object in front of it that is not hay to the left side of the baby elephant.", "visual_elements": [ { "content": "The camera switches to the right side of the baby elephant.", "type": "camera", "weight": 3 }, { "content": "The baby elephant's front legs are bent and touching the ground.", "type": "action", "weight": 3 }, { "content": "The baby elephant's ears and trunk are touching the ground.", "type": "attribute", "weight": 2 }, { "content": "There is a black piece of non-hay in front of the baby elephant's trunk.", "type": "attribute", "weight": 1 }, { "content": "The baby elephant uses its trunk to flick the object in front to the left side of the baby elephant.", "type": "action", "weight": 3 } ] }, { "event": "At the end of the video, the camera switches to two children outside the fence. On the left side of the screen is a little boy. He is wearing a striped shirt and overalls, with his right hand on the fence. He leans forward, looking at the baby elephant and talking to it. On the right side of the screen is a little girl. She is wearing a rose-red inner layer and a gray outer coat, with a key-shaped necklace around her neck, and her right hand is resting on her chest. The little girl first looks to her right at the little boy, then looks at the baby elephant.", "visual_elements": [ { "content": "The camera switches to two children outside the \nfence.", "type": "camera", "weight": 3 }, { "content": "On the left side of the screen is a little boy.", "type": "attribute", "weight": 3 }, { "content": "The little boy is wearing a striped shirt and overalls, with his right hand on the fence.", "type": "attribute", "weight": 2 }, { "content": "The little boy leans forward and talks while looking at the baby elephant.", "type": "action", "weight": 3 }, { "content": "On the right side of the screen is a little girl.", "type": "camera", "weight": 3 }, { "content": "The little girl is wearing a rose-red inner layer and a gray outer layer, with a key-shaped necklace around \nher neck.", "type": "attribute", "weight": 2 }, { "content": "The little girl first looks to her right at the little boy, then looks at the baby elephant.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0024", "video_path": "DREAM-1K/230.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 12.12, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera is close-up and slightly shaking. The woman has blonde hair, wears a black sleeveless top, and wears a black collar and multiple necklaces around her neck. The woman is facing the camera and standing on the left side of the camera. In the background is a wine cabinet with many red and orange wines.\nThe camera switches to a thin silver tool with a flame. Below the tool is a double-layered bowl-shaped container filled with liquid and some wrinkled objects. The flame ignites the objects in the bowl, and sparks fly around.\nThe camera switches to shoot the woman's upper body and surroundings from the side. The woman is on the right side of the picture, standing next to a long table. On the long table are an alcohol lamp, candles, flower baskets, containers and other items. The woman takes back the lit tool with her right hand and blows out the flame.\nThe camera switches to the table again, aiming at a small khaki bowl. 
The bowl contains liquid with sparkling particles in it. The camera moves to the upper right, and a woman's hand appears on the right side of the screen. The woman's index finger, middle finger, and pinky finger are all wearing rings. The woman puts a syringe with a thin silver needle tip into the bowl. The woman sucks the green liquid into the syringe.\nAt the end of the video, the camera switches to a silver tray. The tray contains a syringe and a silver metal clamp. A hand in the upper left corner of the screen quickly picks up the syringe. The camera moves up with the hand's movement. There is also a small glass tray on the tray, which contains a red cherry.", "events": [ { "event": "At the beginning of the video, the camera is close-up and slightly shaking. The woman has blonde hair, wears a black sleeveless top, and wears a black collar and multiple necklaces around her neck. The woman is facing the camera and standing on the left side of the camera. In the background is a wine cabinet with many red and orange wines.", "visual_elements": [ { "content": "The camera is close up and slightly shaking.", "type": "camera", "weight": 3 }, { "content": "The woman has blonde hair, wears a black sleeveless top, and wears a black collar and multiple necklaces around her neck.", "type": "attribute", "weight": 3 }, { "content": "The woman is facing the camera and standing on the left side of the camera.", "type": "attribute", "weight": 2 }, { "content": "The background is a wine cabinet with a lot of red and orange wine.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to a thin silver tool with a flame. Below the tool is a double-layered bowl-shaped container filled with liquid and some wrinkled objects. The flame ignites the objects in the bowl, and sparks fly around.", "visual_elements": [ { "content": "The camera switches to a slender silver tool with a \nflame.", "type": "camera", "weight": 3 }, { "content": "Under the tool is a double-layered bowl-shaped container.", "type": "attribute", "weight": 2 }, { "content": "The bowl-shaped container contains liquid and some wrinkled objects.", "type": "attribute", "weight": 2 }, { "content": "The flames ignite the objects in the bowl, and sparks fly around.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to shoot the woman's upper body and surroundings from the side. The woman is on the right side of the picture, standing next to a long table. On the long table are an alcohol lamp, candles, flower baskets, containers and other items. The woman takes back the lit tool with her right hand and blows out the flame.", "visual_elements": [ { "content": "The camera switches to shoot the woman's upper body and surroundings from the side.", "type": "camera", "weight": 3 }, { "content": "The woman is on the right side of the screen, standing next to a long table.", "type": "attribute", "weight": 2 }, { "content": "The long table is filled with an alcohol lamp, candles, flower baskets, containers and other items.", "type": "scene", "weight": 2 }, { "content": "The woman takes back the lit tool with her right hand and blows out the flame.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the table again, aiming at a small khaki bowl. The bowl contains liquid with sparkling particles in it. The camera moves to the upper right, and a woman's hand appears on the right side of the screen. The woman's index finger, middle finger, and pinky finger are all wearing rings. 
The woman puts a syringe with a thin silver needle tip into the bowl. The woman sucks the green liquid into the syringe.", "visual_elements": [ { "content": "The camera switches to the table again, aiming at a small khaki bowl.", "type": "camera", "weight": 3 }, { "content": "The khaki bowl contains liquid with sparkling particles \nin it.", "type": "attribute", "weight": 2 }, { "content": "The camera moves to the upper right, and a woman's hand appears on the right side of the screen.", "type": "camera", "weight": 3 }, { "content": "The woman's index finger, middle finger and pinky finger all have rings.", "type": "attribute", "weight": 1 }, { "content": "The woman puts a syringe with a slender silver needle tip into the bowl.", "type": "action", "weight": 3 }, { "content": "The woman draws green liquid into the syringe.", "type": "action", "weight": 3 } ] }, { "event": "At the end of the video, the camera switches to a silver tray. The tray contains a syringe and a silver metal clamp. A hand in the upper left corner of the screen quickly picks up the syringe. The camera moves up with the hand's movement. There is also a small glass tray on the tray, which contains a red cherry.", "visual_elements": [ { "content": "The camera switches to a silver tray with a syringe and a silver metal clip on it.", "type": "camera", "weight": 3 }, { "content": "A hand on the upper left of the screen quickly takes the syringe away.", "type": "action", "weight": 3 }, { "content": "The camera moves upward.", "type": "camera", "weight": 3 }, { "content": "There is also a small glass tray on the tray, which contains a red cherry.", "type": "attribute", "weight": 1 } ] } ], "n_events": 5, "n_elements": 22 }, { "index": "TUNA_0025", "video_path": "DREAM-1K/231.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 13.12, "resolution": { "width": 1904, "height": 784 }, "caption": "The video begins with the camera focused on a young woman. She is in the center of the frame, wearing a black leather jacket and with nearly blonde hair. On the left side of the frame is an open green metal door. In the background is a dark tunnel with brick walls and three white lights on the top of the tunnel. The woman looks sideways toward the door, then back to look behind her.\nThe camera then moves back and stays in front of the woman. The woman stands up straight and quickly runs towards the camera. The camera moves down and shoots the woman from above. After the woman runs through the metal door, she swings her arms quickly and runs forward faster. Then, the woman suddenly stops running with her arms and looks to her left. There are some white lights arranged in a straight line in the middle of the top of the tunnel.\nThe camera cuts to an area surrounded by barbed wire, with a blue warning sign hanging above it, reading \"GEFAHR! Brennbare Flüssigkeit\" in white letters.\nThe camera quickly moves to the right, and the woman quickly grasps the right end of a mesh door with both hands. Then, the camera switches to the back of the woman. The woman has a ponytail, and she opens the mesh door to her left. There are two jars behind the door, the jar on the left side of the screen is marked with the number \"09\", and the jar on the right side of the screen is marked with the number \"05\". There are some pipes connected to the jars.\nThe camera switches to a narrow brick arched passageway, dimly lit, with some white lights above it. 
A man runs hard from the end of the passageway towards the camera, and the camera moves forward, getting closer and closer to the man. He is bald, shirtless, and wearing black trousers.", "events": [ { "event": "The video begins with the camera focused on a young woman. She is in the center of the frame, wearing a black leather jacket and with nearly blonde hair. On the left side of the frame is an open green metal door. In the background is a dark tunnel with brick walls and three white lights on the top of the tunnel. The woman looks sideways toward the door, then back to look behind her.", "visual_elements": [ { "content": "The camera is focused on a young woman in the center of the frame.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a black leather jacket and has blonde hair.", "type": "attribute", "weight": 2 }, { "content": "There is an open green door on the left side of the frame and a dark tunnel in the background.", "type": "scene", "weight": 2 }, { "content": "The background is a dark tunnel with three white lights on the top.", "type": "scene", "weight": 2 }, { "content": "The walls of the tunnel are made of bricks.", "type": "attribute", "weight": 1 }, { "content": "The woman faces the wall on the left side of the frame, then turns to look around.", "type": "action", "weight": 3 }, { "content": "Then, the woman looks back behind her.", "type": "action", "weight": 3 } ] }, { "event": "The camera then moves back and stays in front of the woman. The woman stands up straight and quickly runs towards the camera. The camera moves down and shoots the woman from above. After the woman runs through the metal door, she swings her arms quickly and runs forward faster. Then, the woman suddenly stops running with her arms and looks to her left. There are some white lights arranged in a straight line in the middle of the top of the tunnel.", "visual_elements": [ { "content": "The camera moves backwards, always in front of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman stands up straight and quickly runs towards the camera.", "type": "action", "weight": 3 }, { "content": "The camera is lowered to shoot the woman from above.", "type": "camera", "weight": 3 }, { "content": "The woman swings her arms quickly and runs forward faster.", "type": "action", "weight": 3 }, { "content": "The woman suddenly stops running with her hands together, and looks to her left.", "type": "action", "weight": 3 }, { "content": "There are some white lights arranged in a straight line in the middle of the top of the tunnel.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to an area surrounded by barbed wire, with a blue warning sign hanging above it, reading \"GEFAHR! Brennbare Flüssigkeit\" in white letters.", "visual_elements": [ { "content": "The camera switches to an area surrounded by an iron grid.", "type": "camera", "weight": 3 }, { "content": "A blue warning sign is hung on the iron grid.", "type": "camera", "weight": 3 }, { "content": "The warning sign reads \"GEFAHR! Brennbare Flüssigkeit\" in white letters.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera quickly moves to the right, and the woman quickly grasps the right end of a mesh door with both hands. Then, the camera switches to the back of the woman. The woman has a ponytail, and she opens the mesh door to her left. 
There are two jars behind the door, the jar on the left side of the screen is marked with the number \"09\", and the jar on the right side of the screen is marked with the number \"05\". There are some pipes connected to the jars.", "visual_elements": [ { "content": "The camera moves right quickly.", "type": "camera", "weight": 3 }, { "content": "The woman quickly grasps the right end of a mesh door with both hands.", "type": "action", "weight": 3 }, { "content": "The camera switches to the back of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman has a ponytail.", "type": "attribute", "weight": 2 }, { "content": "The woman opens the mesh door to her left.", "type": "action", "weight": 3 }, { "content": "There are two metal cans behind the door.", "type": "scene", "weight": 2 }, { "content": "The can on the left side of the screen is marked with the number \"09\", and the can on the right side of the screen is marked with the number \"05\".", "type": "scene", "weight": 2 }, { "content": "Some pipes are connected to the cans.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to a narrow brick arched passageway, dimly lit, with some white lights above it. A man runs hard from the end of the passageway towards the camera, and the camera moves forward, getting closer and closer to the man. He is bald, shirtless, and wearing black trousers.", "visual_elements": [ { "content": "The camera switches to a narrow brick arched passage.", "type": "camera", "weight": 3 }, { "content": "The arched passage is dimly lit, with some white lights above.", "type": "scene", "weight": 2 }, { "content": "A man runs hard towards the camera from the end of the arched passage.", "type": "action", "weight": 3 }, { "content": "The camera moves forward, getting closer and closer to the man.", "type": "camera", "weight": 3 }, { "content": "The man is bald, shirtless, and wearing black trousers.", "type": "attribute", "weight": 2 } ] } ], "n_events": 5, "n_elements": 29 }, { "index": "TUNA_0026", "video_path": "DREAM-1K/232.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 8.5, "resolution": { "width": 1904, "height": 1024 }, "caption": "At the beginning of the video, the camera is focused on the front of an elephant. The background is dim. The elephant is on the right side of the screen, with large, drooping ears. On the left side of the screen is a bald man wearing a dark suit. The elephant waves its ears and walks to the right side of the screen. The man tilts his head to the right and follows the elephant with his eyes.\nThe camera switches to the back of the elephant, where the bald man stands on the right side of the elephant. Behind the elephant is a man wearing a red hat and a dark shirt, who lowers his head and pushes the elephant forward.\nThen, the camera zooms in and shoots from the side of the man in the hat. The bald man continues to follow the elephant. To his right is a man wearing a blue hat and a blue uniform. The uniformed man stands tall and looks to his right. To the right of the uniformed man stands another man wearing dark clothes and a dark hat.\nFinally, the camera switches to a scene inside a gate, where some people are crowding and reaching their hands through the gap in the gate.", "events": [ { "event": "At the beginning of the video, the camera is focused on the front of an elephant. The background is dim. 
The elephant is on the right side of the screen, with large, drooping ears. On the left side of the screen is a bald man wearing a dark suit. The elephant waves its ears and walks to the right side of the screen. The man tilts his head to the right and follows the elephant with his eyes.", "visual_elements": [ { "content": "The camera is focused on the front of an elephant.", "type": "camera", "weight": 3 }, { "content": "The background is dim.", "type": "scene", "weight": 3 }, { "content": "The elephant is on the right side of the picture.", "type": "scene", "weight": 2 }, { "content": "The elephant has large, drooping ears.", "type": "attribute", "weight": 1 }, { "content": "On the left side of the picture is a bald man in a dark suit.", "type": "attribute", "weight": 3 }, { "content": "The elephant waves its ears and walks toward the camera.", "type": "action", "weight": 3 }, { "content": "The man tilts his head to the right and follows the elephant.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the back of the elephant, where the bald man stands on the right side of the elephant. Behind the elephant is a man wearing a red hat and a dark shirt, who lowers his head and pushes the elephant forward.", "visual_elements": [ { "content": "The camera switches to the back of the elephant.", "type": "camera", "weight": 3 }, { "content": "The bald man stands on the right side of the elephant.", "type": "attribute", "weight": 3 }, { "content": "Behind the elephant is a man wearing a red hat and a dark shirt.", "type": "attribute", "weight": 2 }, { "content": "The man in the hat lowers his head and pushes the elephant forward.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera zooms in and shoots from the side of the man in the hat. The bald man continues to follow the elephant. To his right is a man wearing a blue hat and a blue uniform. The uniformed man stands tall and looks to his right. 
To the right of the uniformed man stands another man wearing dark clothes and a dark hat.", "visual_elements": [ { "content": "The camera zooms in and shoots from the side of the man in the hat.", "type": "camera", "weight": 3 }, { "content": "The bald man continues to follow the elephant.", "type": "action", "weight": 2 }, { "content": "To the right of the bald man is a man wearing a blue hat and a blue uniform.", "type": "attribute", "weight": 2 }, { "content": "The man in the uniform stands tall.", "type": "attribute", "weight": 2 }, { "content": "The man in the uniform looks up to his right.", "type": "action", "weight": 3 }, { "content": "In the background, to the right of the man in uniform stands a man wearing dark clothes and a dark hat.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera switches to a scene inside a gate, where some people are crowding and reaching their hands through the gap in the gate.", "visual_elements": [ { "content": "The camera finally switches to a scene inside a fence gate.", "type": "camera", "weight": 3 }, { "content": "In the picture, some people are crowding and reaching their hands through the gap in the fence \ngate.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0027", "video_path": "DREAM-1K/233.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 4.88, "resolution": { "width": 1840, "height": 1024 }, "caption": "At the beginning of the video, the camera shows four children running on a sidewalk with fallen leaves. The background is cloudy, with some small hills and trees in the distance, and a few short trees with brown leaves on the right side of the screen. Behind the trees is a courtyard decorated with a Halloween theme, and there is a fake tombstone decorated with a skull in front of the courtyard.\nThe person on the far left of the picture is wearing a mummy costume and is following closely behind the other three people. The young girl on the left of the picture is wearing a jacket and a T-shirt with NASA text and light blue jeans. The young black boy on the right of the picture is wearing a striped shirt and gray pants. The young boy on the far right of the picture is wearing an orange sweatshirt, a dark blue jacket, dark blue jeans and brown shoes. The young boy on the far right of the picture is running in front of the road.\nSuddenly, the four people stop and look to the left of the screen. The camera switches to another scene, where two children and an adult are walking on the land next to the sidewalk. The adult is dressed in black. The child walking in front is wearing a pirate costume, and the child walking behind is wearing a blue hoodie, black pants and blue boots, and holding a small basket in his left hand. In the background is a giant white ghost sculpture and several low-rise houses.\nA young man gestures with his right hand towards the camera, signaling the person in front to come over. He is wearing bunny ears and a bunny nose and mouth, white gloves on both hands, white trousers and white shoes, and holding a gray canvas bag in his left hand.\nThe camera switches to the bunny-dressed man. He runs toward the camera with his head down, and behind him are a row of jack-o'-lanterns of varying sizes. Finally, the camera switches back to the original four people, who have stopped and are standing straight on the wet sidewalk. The mummy-dressed man holds out his hands. 
There is water on the sidewalk in the background.", "events": [ { "event": "At the beginning of the video, the camera shows four children running on a sidewalk with fallen leaves. The background is cloudy, with some small hills and trees in the distance, and a few short trees with brown leaves on the right side of the screen. Behind the trees is a courtyard decorated with a Halloween theme, and there is a fake tombstone decorated with a skull in front of the courtyard.", "visual_elements": [ { "content": "The camera shows four children running on a sidewalk with fallen leaves.", "type": "camera", "weight": 3 }, { "content": "The background is cloudy with some hills and trees in the distance.", "type": "scene", "weight": 2 }, { "content": "There are several short trees with brown leaves on the right side of the frame.", "type": "scene", "weight": 2 }, { "content": "Behind the trees is a courtyard decorated with a Halloween theme.", "type": "scene", "weight": 2 }, { "content": "There is a fake tombstone decorated with a skull in front of the courtyard.", "type": "scene", "weight": 2 } ] }, { "event": "The person on the far left of the picture is wearing a mummy costume and is following closely behind the other three people. The young girl on the left of the picture is wearing a jacket and a T-shirt with NASA text and light blue jeans. The young black boy on the right of the picture is wearing a striped shirt and gray pants. The young boy on the far right of the picture is wearing an orange sweatshirt, a dark blue jacket, dark blue jeans and brown shoes. The young boy on the far right of the picture is running in front of the road.", "visual_elements": [ { "content": "The person on the far left of the frame is wearing a mummy costume.", "type": "camera", "weight": 3 }, { "content": "The person in the mummy costume is following closely behind the other three people.", "type": "attribute", "weight": 2 }, { "content": "The young girl on the left side of the frame is wearing a jacket and a T-shirt with NASA text and light blue jeans.", "type": "attribute", "weight": 2 }, { "content": "The young black boy on the right side of the frame is wearing a striped shirt and gray pants.", "type": "attribute", "weight": 2 }, { "content": "The young boy on the far right side of the frame is wearing an orange sweatshirt, a dark blue jacket, dark blue jeans and brown shoes.", "type": "attribute", "weight": 2 }, { "content": "The young boy on the far right side of the frame is running in front of the road.", "type": "attribute", "weight": 2 } ] }, { "event": "Suddenly, the four people stop and look to the left of the screen. The camera switches to another scene, where two children and an adult are walking on the land next to the sidewalk. The adult is dressed in black. The child walking in front is wearing a pirate costume, and the child walking behind is wearing a blue hoodie, black pants and blue boots, and holding a small basket in his left hand. 
In the background is a giant white ghost sculpture and several low-rise houses.", "visual_elements": [ { "content": "The four people suddenly stop and look to the left of the screen.", "type": "action", "weight": 3 }, { "content": "The camera switches to another scene.", "type": "camera", "weight": 3 }, { "content": "Two children and an adult are walking on the land beside the sidewalk.", "type": "action", "weight": 3 }, { "content": "The adult is dressed in black.", "type": "attribute", "weight": 2 }, { "content": "The child walking in front is wearing a pirate costume.", "type": "attribute", "weight": 2 }, { "content": "The child walking behind is wearing a blue hoodie, black pants and blue boots.", "type": "attribute", "weight": 2 }, { "content": "The child walking behind is carrying a small basket in his left hand.", "type": "attribute", "weight": 2 }, { "content": "In the background is a giant white ghost sculpture and several low-rise houses.", "type": "scene", "weight": 2 } ] }, { "event": "A young man gestures with his right hand towards the camera, signaling the person in front to come over. He is wearing bunny ears and a bunny nose and mouth, white gloves on both hands, white trousers and white shoes, and holding a gray canvas bag in his left hand.", "visual_elements": [ { "content": "A young man gestures with his right hand towards the camera, signaling the person in front to come over.", "type": "action", "weight": 3 }, { "content": "The man making the gesture is wearing bunny ears and a bunny nose and mouth.", "type": "attribute", "weight": 2 }, { "content": "The man with the bunny decoration wears white gloves on both hands.", "type": "attribute", "weight": 2 }, { "content": "The man with the bunny decoration wears white trousers and white shoes.", "type": "attribute", "weight": 2 }, { "content": "The man with the bunny decoration carries a gray canvas bag in his left hand.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to the bunny-dressed man. He runs toward the camera with his head down, and behind him are a row of jack-o'-lanterns of varying sizes. Finally, the camera switches back to the original four people, who have stopped and are standing straight on the wet sidewalk. The mummy-dressed man holds out his hands. 
There is water on the sidewalk in the background.", "visual_elements": [ { "content": "The camera switches to the man in the bunny costume.", "type": "camera", "weight": 3 }, { "content": "The man in the bunny costume lowers his head and runs towards the camera.", "type": "action", "weight": 3 }, { "content": "Behind the man in the bunny costume is a row of pumpkin lanterns of different sizes.", "type": "scene", "weight": 2 }, { "content": "The camera finally switches back to the original four people.", "type": "camera", "weight": 3 }, { "content": "The four people stop walking.", "type": "action", "weight": 3 }, { "content": "The four people stand straight on the wet sidewalk.", "type": "action", "weight": 3 }, { "content": "The man in the mummy costume stretches out his hands.", "type": "action", "weight": 3 }, { "content": "There is water on the sidewalk.", "type": "scene", "weight": 2 } ] } ], "n_events": 5, "n_elements": 32 }, { "index": "TUNA_0028", "video_path": "DREAM-1K/234.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.62, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera is focused on a woman. She is wearing a yellow dress and riding a white horse. It is night time and the background is an ancient stone building. There is also a black horse on the right side of the screen. The woman holds the black reins in her left hand and looks to her left.\nThe camera switches to an elderly man wearing a black uniform, a white wig and a black hat, sitting in the front of the carriage with a serious look on his face. Behind the carriage, there are several lines of people walking with torches.\nThen, the camera switches to the front of the two black horses. Then the camera switches again, with a close-up of the horses' hooves. Then, the camera switches to a farther distance. A crowd gathers in the square in front of the building, holding torches. In front of the crowd, the woman is still riding on the white horse, and the two black horses are walking towards the woman.\nThe camera cuts back to the woman, who jumps off the white horse and then faces the left side of the screen. Then, the camera switches again, and a middle-aged man in a red coat stands in front of the crowd, with dense crowds and torches in the background.\nThe camera follows the woman from behind. The woman runs straight toward the crowd, holding an object in her right hand. She then turns to the carriage on her left. The crowd watches the woman and witnesses this process.", "events": [ { "event": "At the beginning of the video, the camera is focused on a woman. She is wearing a yellow dress and riding a white horse. It is night time and the background is an ancient stone building. There is also a black horse on the right side of the screen. 
The woman holds the black reins in her left hand and looks to her left.", "visual_elements": [ { "content": "The camera is focused on a woman.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a long yellow dress and riding a white horse.", "type": "attribute", "weight": 3 }, { "content": "The background is in front of an old stone building at \nnight.", "type": "scene", "weight": 3 }, { "content": "There is also a black horse on the right side of the picture.", "type": "scene", "weight": 1 }, { "content": "The woman holds the black reins in her left hand and looks to her left.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to an elderly man wearing a black uniform, a white wig and a black hat, sitting in the front of the carriage with a serious look on his face. Behind the carriage, there are several lines of people walking with torches.", "visual_elements": [ { "content": "The camera switches to an old man.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a black uniform, a white wig and a black hat.", "type": "attribute", "weight": 2 }, { "content": "The man sits in the front of the carriage, looking very serious.", "type": "attribute", "weight": 2 }, { "content": "There are several lines of people walking with torches.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to the front of the two black horses. Then the camera switches again, with a close-up of the horses' hooves. Then, the camera switches to a farther distance. A crowd gathers in the square in front of the building, holding torches. In front of the crowd, the woman is still riding on the white horse, and the two black horses are walking towards the woman.", "visual_elements": [ { "content": "The camera switches to the front of two black horses.", "type": "camera", "weight": 3 }, { "content": "The camera switches again, with a close-up of the horses' hooves.", "type": "camera", "weight": 3 }, { "content": "The camera switches to a farther distance.", "type": "camera", "weight": 3 }, { "content": "A crowd gathers in the square in front of the building, holding torches.", "type": "scene", "weight": 2 }, { "content": "In front of the crowd, the woman is still riding a white \nhorse.", "type": "attribute", "weight": 1 }, { "content": "The two black horses are walking towards the woman.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts back to the woman, who jumps off the white horse and then faces the left side of the screen. Then, the camera switches again, and a middle-aged man in a red coat stands in front of the crowd, with dense crowds and torches in the background.", "visual_elements": [ { "content": "The camera cuts back to the woman again.", "type": "camera", "weight": 3 }, { "content": "The woman jumps off the white horse and then faces the left side of the screen.", "type": "action", "weight": 3 }, { "content": "The camera switches again, and a middle-aged man in a red coat stands in front of the crowd.", "type": "attribute", "weight": 3 }, { "content": "The background is a dense crowd and torches.", "type": "scene", "weight": 2 } ] }, { "event": "The camera follows the woman from behind. The woman runs straight toward the crowd, holding an object in her right hand. She then turns to the carriage on her left. 
The crowd watches the woman and witnesses this process.", "visual_elements": [ { "content": "The camera follows the woman from behind.", "type": "camera", "weight": 3 }, { "content": "The woman runs straight towards the crowd.", "type": "action", "weight": 3 }, { "content": "At the same time, the woman holds an object in her right hand.", "type": "action", "weight": 1 }, { "content": "Then the woman turns to the carriage on her left.", "type": "action", "weight": 3 }, { "content": "The crowd looks at the woman and witnesses her \nrunning.", "type": "action", "weight": 1 } ] } ], "n_events": 5, "n_elements": 24 }, { "index": "TUNA_0029", "video_path": "DREAM-1K/235.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 8.88, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera is focused on a white gorilla locked in a gray iron door. The gorilla is very angry and opens its mouth wide to roar at a black woman passing by the door, showing its sharp teeth. The camera moves to the right to show a bald man.\nThen, the camera switches to a farther angle, showing the entire room. In the center left of the screen is a young black woman wearing a gray coat and black tights. The black woman hurriedly backs away. In the center right of the screen is a middle-aged bald man wearing a brown leather jacket and light-colored trousers. The middle-aged man shields the black woman with his right hand while backing away. The gorilla is very tall, and its front limbs fall to the ground. The gorilla looks at the man and then roars.\nThe camera switches to the middle-aged man behind the iron door. The middle-aged man opens his mouth and says something to the camera. Behind him is an experimental room, where a young man wearing a white lab coat and blue gloves stands and looks to his left. The camera quickly cuts back to the gorilla and rotates quickly to the left, then moves down. The gorilla turns his head to the left of the screen, lowers his head and shouts, then gets on all fours and crashes to the left of the screen.\nThe camera switches to the outside of the iron gate, facing the middle-aged man and the black woman. The middle-aged man is on the left side of the screen, and the black woman is on the right side of the screen. The middle-aged man and the black woman both look to their left front, and they look very surprised and nervous. Behind the middle-aged man and the black woman are three young staff members wearing white lab coats. Behind the two, three staff members, including two women and one man, look forward.\nThe camera switches to the gorilla in the cage again and moves upwards following the gorilla's movements. It stands up from a crouch and picks up a heavy object with both hands. The camera follows the gorilla as it quickly turns to the right. The gorilla turns to the right side of the gorilla while lifting the heavy object with both hands and throwing it forward.\nThe camera cuts back to the outside of the iron gate. The middle-aged man stretches out his hands and lowers them, signaling the gorilla to stop. The black woman tilts her head back and looks at the gorilla, stunned. 
The experimenters behind the two quickly step back.\nThe camera cuts back to the gorilla outside the iron gate, showing that the gorilla turns to the right side of the screen and throws a heavy object directly at the camera, which is about to hit the iron gate.", "events": [ { "event": "At the beginning of the video, the camera is focused on a white gorilla locked in a gray iron door. The gorilla is very angry and opens its mouth wide to roar at a black woman passing by the door, showing its sharp teeth. The camera moves to the right to show a bald man.", "visual_elements": [ { "content": "The camera is focused on a white gorilla locked in a gray iron door.", "type": "camera", "weight": 3 }, { "content": "The gorilla is very angry.", "type": "attribute", "weight": 2 }, { "content": "The gorilla opens its mouth wide and yells at a black woman passing by the door.", "type": "action", "weight": 3 }, { "content": "The gorilla shows its sharp teeth.", "type": "action", "weight": 1 }, { "content": "The camera moves to the right to show a bald man.", "type": "camera", "weight": 3 } ] }, { "event": "Then, the camera switches to a farther angle, showing the entire room. In the center left of the screen is a young black woman wearing a gray coat and black tights. The black woman hurriedly backs away. In the center right of the screen is a middle-aged bald man wearing a brown leather jacket and light-colored trousers. The middle-aged man shields the black woman with his right hand while backing away. The gorilla is very tall, and its front limbs fall to the ground. The gorilla looks at the man and then roars.", "visual_elements": [ { "content": "Then, the camera switches to a farther angle, showing the entire room.", "type": "camera", "weight": 3 }, { "content": "In the center left of the picture is a young black woman wearing a gray coat and black tights.", "type": "camera", "weight": 3 }, { "content": "The black woman hurriedly backs away.", "type": "action", "weight": 3 }, { "content": "In the center right of the picture is a middle-aged bald man.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man wears a brown leather jacket and light-colored trousers.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged man shields the black woman with his right hand while backing away.", "type": "action", "weight": 2 }, { "content": "The gorilla is very tall.", "type": "attribute", "weight": 2 }, { "content": "The gorilla's front limbs fall to the ground.", "type": "action", "weight": 1 }, { "content": "The gorilla looks at the man and then roars.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the middle-aged man behind the iron door. The middle-aged man opens his mouth and says something to the camera. Behind him is an experimental room, where a young man wearing a white lab coat and blue gloves stands and looks to his left. The camera quickly cuts back to the gorilla and rotates quickly to the left, then moves down. 
The gorilla turns his head to the left of the screen, lowers his head and shouts, then gets on all fours and crashes to the left of the screen.", "visual_elements": [ { "content": "The camera switches to the middle-aged man behind the iron gate.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man opens his mouth and says something to the camera.", "type": "action", "weight": 3 }, { "content": "Behind the middle-aged man is an experimental room.", "type": "scene", "weight": 2 }, { "content": "There is a young man wearing a white lab coat and blue gloves.", "type": "scene", "weight": 2 }, { "content": "The man in the lab coat stands and looks to his left.", "type": "action", "weight": 1 }, { "content": "The camera quickly switches back to the gorilla and rotates quickly to the left, and finally moves down.", "type": "camera", "weight": 3 }, { "content": "The gorilla looks back to the left side of the screen, lowers his head and shouts.", "type": "action", "weight": 3 }, { "content": "The gorilla gets on all fours and crashes to the left side of the screen.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the outside of the iron gate, facing the middle-aged man and the black woman. The middle-aged man is on the left side of the screen, and the black woman is on the right side of the screen. The middle-aged man and the black woman both look to their left front, and they look very surprised and nervous. Behind the middle-aged man and the black woman are three young staff members wearing white lab coats. Behind the two, the three staff members, including two women and one man, look forward.", "visual_elements": [ { "content": "The camera switches to the outside of the iron gate, facing the middle-aged man and the black woman.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man is on the left side of the screen, and the black woman is on the right side of the screen.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man and the black woman both look to their left front.", "type": "action", "weight": 1 }, { "content": "The middle-aged man and the black woman look very surprised and nervous.", "type": "attribute", "weight": 2 }, { "content": "Behind the middle-aged man and the black woman are three young staff members wearing white lab coats.", "type": "scene", "weight": 2 }, { "content": "The three staff members, two women and one man, are looking forward.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera switches to the gorilla in the cage again and moves upwards following the gorilla's movements. It stands up from a crouch and picks up a heavy object with both hands. The camera follows the gorilla as it quickly turns to the right. 
The gorilla turns to its right while lifting the heavy object with both hands and throwing it forward.", "visual_elements": [ { "content": "The camera switches to the gorilla in the cage again and moves upwards following the gorilla's movements.", "type": "camera", "weight": 3 }, { "content": "The gorilla stands up from a squat and picks up a heavy object with both hands.", "type": "action", "weight": 3 }, { "content": "The camera follows the gorilla and quickly rotates to the right.", "type": "camera", "weight": 3 }, { "content": "The gorilla turns to its right.", "type": "action", "weight": 3 }, { "content": "The gorilla lifts the heavy object with both hands at the same time and throws it forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the outside of the iron gate. The middle-aged man stretches out his hands and lowers them, signaling the gorilla to stop. The black woman tilts her head back and looks at the gorilla, stunned. The experimenters behind the two quickly step back.", "visual_elements": [ { "content": "The camera cuts back to the outside of the iron gate again.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man stretches out his hands and lowers them, signaling the gorilla to stop.", "type": "action", "weight": 3 }, { "content": "The black woman tilts her head back and looks at the gorilla.", "type": "action", "weight": 3 }, { "content": "The black woman is stunned.", "type": "attribute", "weight": 2 }, { "content": "The experimenters behind the middle-aged man and the black woman quickly retreat.", "type": "action", "weight": 1 } ] }, { "event": "The camera cuts back to the gorilla outside the iron gate, showing that the gorilla turns to the right side of the screen and throws a heavy object directly at the camera, which is about to hit the iron gate.", "visual_elements": [ { "content": "The camera cuts back to the gorilla outside the iron gate.", "type": "camera", "weight": 3 }, { "content": "The gorilla turns to the right of the screen.", "type": "action", "weight": 2 }, { "content": "The gorilla throws a heavy object at the camera.", "type": "action", "weight": 3 } ] } ], "n_events": 7, "n_elements": 41 }, { "index": "TUNA_0030", "video_path": "DREAM-1K/238.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 7.5, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera is focused on the upper body of a middle-aged man. He is wearing a light blue coat and a white inner garment, sitting facing the camera, with a rather haggard face. The middle-aged man's eyes turn from his right side to his left side. At the same time, there is a man in a black T-shirt in the background facing the middle-aged man sideways, walking from the left side of the screen to the right side of the screen. The middle-aged man's eyes turn as the man in black walks. The middle-aged man turns his head to his left side and smiles at the same time. The man in black walks out of the screen completely. The background is blurred, and it is a relatively dark room with three white light sources on the ceiling.\nThen, the camera switches to the right side of the middle-aged man. The background to the left of the middle-aged man is blurred, and there is a row of men sitting there wearing the same shirts as the middle-aged man. 
Then the camera moves slowly to the left, and the man continues to smile while nodding his head to his right. A hand appears in the picture. The camera switches quickly, giving the hand a very short close-up shot. The hand reaches out to a green orange on the table. The camera switches to a farther scene, and the background becomes clear. There are two dining tables in the picture. On the dining table close to the camera, the middle-aged man holds the left wrist of the standing man with his right hand. The middle-aged man also glances to the right at the standing man. The standing man's left hand is against the table, holding the orange. The standing man leans forward, with the fingers of his right hand bent to support the table\nAs for the background table far from the camera, a bald man looks up to his left at the middle-aged man, holding the cutlery in his left hand. To the right of the bald man, a curly-haired man is chewing food with his head slightly lowered. Opposite the bald man, a man with a thick beard is sitting.", "events": [ { "event": "At the beginning of the video, the camera is focused on the upper body of a middle-aged man. He is wearing a light blue coat and a white inner garment, sitting facing the camera, with a rather haggard face. The middle-aged man's eyes turn from his right side to his left side. At the same time, there is a man in a black T-shirt in the background facing the middle-aged man sideways, walking from the left side of the screen to the right side of the screen. The middle-aged man's eyes turn as the man in black walks. The middle-aged man turns his head to his left side and smiles at the same time. The man in black walks out of the screen completely. The background is blurred, and it is a relatively dark room with three white light sources on the ceiling.", "visual_elements": [ { "content": "The camera is focused on the upper body of a middle-aged man.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man is wearing a light blue coat and a white inner garment.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged man is sitting facing the camera.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged man's eyes turn from his right side to his left side.", "type": "camera", "weight": 3 }, { "content": "In the background, there is a man in a black T-shirt facing the middle-aged man from the left side of the screen to the right side of the screen.", "type": "action", "weight": 1 }, { "content": "The middle-aged man's eyes turn as the man in black walks.", "type": "action", "weight": 3 }, { "content": "The middle-aged man turns his head to his left side and smiles.", "type": "action", "weight": 3 }, { "content": "The man in black walks out of the screen completely.", "type": "action", "weight": 1 }, { "content": "The background is blurred, and it is a relatively dark \nroom.", "type": "scene", "weight": 2 }, { "content": "There are 3 white light sources on the roof.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to the right side of the middle-aged man. The background to the left of the middle-aged man is blurred, and there is a row of men sitting there wearing the same shirts as the middle-aged man. Then the camera moves slowly to the left, and the man continues to smile while nodding his head to his right. A hand appears in the picture. The camera switches quickly, giving the hand a very short close-up shot. The hand reaches out to a green orange on the table. 
The camera switches to a farther scene, and the background becomes clear. There are two dining tables in the picture. On the dining table close to the camera, the middle-aged man holds the left wrist of the standing man with his right hand. The middle-aged man also glances to the right at the standing man. The standing man's left hand is against the table, holding the orange. The standing man leans forward, with the fingers of his right hand bent to support the table", "visual_elements": [ { "content": "The camera switches to the right side of the middle-aged man.", "type": "camera", "weight": 3 }, { "content": "The background on the left side of the middle-aged man is blurred, and there is a row of men wearing the same shirts as the middle-aged man.", "type": "scene", "weight": 2 }, { "content": "The camera moves slowly to the left.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man continues to smile.", "type": "action", "weight": 2 }, { "content": "The middle-aged man also shakes his head to his right.", "type": "action", "weight": 3 }, { "content": "A hand appears in the picture.", "type": "camera", "weight": 3 }, { "content": "The camera switches quickly, giving a very short close-up shot of a hand.", "type": "camera", "weight": 3 }, { "content": "A hand reaches out to a green orange on the table.", "type": "action", "weight": 3 }, { "content": "The camera switches to a distant scene, and the background becomes clear.", "type": "camera", "weight": 3 }, { "content": "There are two dining tables in the picture.", "type": "camera", "weight": 3 }, { "content": "On the dining table close to the camera, the middle-aged man holds the left wrist of the standing man with his right hand.", "type": "action", "weight": 3 }, { "content": "The middle-aged man glances to the right at the standing man at the same time.", "type": "action", "weight": 3 }, { "content": "The standing man's left hand is against the table, holding the orange.", "type": "action", "weight": 3 }, { "content": "The standing man leans forward, with his right hand fingers bent to support the table.", "type": "action", "weight": 3 } ] }, { "event": "As for the background table far from the camera, a bald man looks up to his left at the middle-aged man, holding the cutlery in his left hand. To the right of the bald man, a curly-haired man is chewing food with his head slightly lowered. Opposite the bald man, a man with a thick beard is sitting.", "visual_elements": [ { "content": "For the background table far from the camera, a bald man looks up to his left at the middle-aged man.", "type": "action", "weight": 1 }, { "content": "The bald man holds the tableware in his left hand.", "type": "action", "weight": 1 }, { "content": "To the right of the bald man, a curly-haired man is chewing food with his head slightly lowered.", "type": "scene", "weight": 2 }, { "content": "Opposite the bald man, a man with a thick beard sits.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 28 }, { "index": "TUNA_0031", "video_path": "DREAM-1K/240.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 9.62, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera is focused on a red SUV. The SUV is driving on a road with a dense forest on the left side of the road. The SUV hits a roadside sign post and barbed wire, and then backs up. 
The front of the SUV is severely damaged and smoke is coming out. A middle-aged man in light-colored clothes is sitting in the passenger seat.\nThe camera switches to the rear window of the SUV. Six male soldiers in camouflage uniforms, armed with weapons, stand in a defensive posture on the road. Behind them are two green military vehicles. Then they quickly turn around and run towards the vehicles.\nThe camera switches again, returning to the right view closer to the SUV. The SUV bypasses the wire fence and drives quickly to the left front of the road. Smoke continues to come out of the front of the car.\nThen, the camera switches to a closer view of the six soldiers. Three of them board the military vehicle on the left side of the screen, and three board the military vehicle on the right side of the screen, preparing to chase the SUV. In the distance, the sky is covered with gray clouds, and there are many trees on both sides of the road.\nFinally, the camera switches to show a section of the highway in the direction of the SUV. There is a sign on the right side of the highway that says turn left, and the road ahead turns left. There are still many trees on both sides of the highway. The tops of the trees are shaking.", "events": [ { "event": "At the beginning of the video, the camera is focused on a red SUV. The SUV is driving on a road with a dense forest on the left side of the road. The SUV hits a roadside sign post and barbed wire, and then backs up. The front of the SUV is severely damaged and smoke is coming out. A middle-aged man in light-colored clothes is sitting in the passenger seat.", "visual_elements": [ { "content": "The camera is focused on a red SUV.", "type": "camera", "weight": 3 }, { "content": "The SUV is driving on a road.", "type": "action", "weight": 3 }, { "content": "There is a dense forest on the left side of the road.", "type": "scene", "weight": 2 }, { "content": "The SUV hits a roadside sign post and barbed wire.", "type": "action", "weight": 3 }, { "content": "The SUV backs up.", "type": "action", "weight": 3 }, { "content": "The front of the SUV is seriously damaged and smoke is coming out.", "type": "attribute", "weight": 2 }, { "content": "A middle-aged man in light-colored clothes is sitting on the passenger seat of the SUV.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera switches to the rear window of the SUV. Six male soldiers in camouflage uniforms, armed with weapons, stand in a defensive posture on the road. Behind them are two green military vehicles. Then they quickly turn around and run towards the vehicles.", "visual_elements": [ { "content": "The camera switches to the perspective of the rear window of the SUV.", "type": "camera", "weight": 3 }, { "content": "There are 6 male soldiers in camouflage uniforms.", "type": "camera", "weight": 3 }, { "content": "The 6 soldiers are holding weapons and standing on the road in a defensive posture.", "type": "action", "weight": 3 }, { "content": "Behind the 6 soldiers are two green military vehicles.", "type": "scene", "weight": 2 }, { "content": "The 6 soldiers quickly turn around and run towards the two military vehicles.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches again, returning to the right view closer to the SUV. The SUV bypasses the wire fence and drives quickly to the left front of the road. 
Smoke continues to come out of the front of the car.", "visual_elements": [ { "content": "The camera switches again, returning to the right perspective closer to the SUV.", "type": "camera", "weight": 3 }, { "content": "The SUV bypasses the barbed wire and drives quickly to the left front of the road.", "type": "action", "weight": 3 }, { "content": "Smoke continues to come out of the front of the SUV.", "type": "attribute", "weight": 1 } ] }, { "event": "Then, the camera switches to a closer view of the six soldiers. Three of them board the military vehicle on the left side of the screen, and three board the military vehicle on the right side of the screen, preparing to chase the SUV. In the distance, the sky is covered with gray clouds, and there are many trees on both sides of the road.", "visual_elements": [ { "content": "The camera switches to a closer perspective of the 6 soldiers.", "type": "camera", "weight": 3 }, { "content": "Three of the 6 soldiers board the military vehicle on the left side of the screen, and three board the military vehicle on the right side of the screen.", "type": "action", "weight": 3 }, { "content": "In the distance, the sky is covered with gray clouds, and there are many trees on both sides of the road.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera switches to show a section of the highway in the direction of the SUV. There is a sign on the right side of the highway that says turn left, and the road ahead turns left. There are still many trees on both sides of the highway. The tops of the trees are shaking.", "visual_elements": [ { "content": "The camera switches to show a section of the road in the direction of the SUV.", "type": "camera", "weight": 3 }, { "content": "There is a sign on the signpost saying turn left.", "type": "scene", "weight": 2 }, { "content": "The road turns left in front of the signpost.", "type": "scene", "weight": 2 }, { "content": "There are still many trees on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "The tops of the trees are shaking.", "type": "scene", "weight": 1 } ] } ], "n_events": 5, "n_elements": 23 }, { "index": "TUNA_0032", "video_path": "DREAM-1K/241.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Film", "duration": 6.75, "resolution": { "width": 1920, "height": 784 }, "caption": "The video begins with a bird's-eye view of a little boy in a loose grey robe. The boy is suspended in the air, facing the camera, with his arms and legs spread out. In the background is a green forest and a waterfall on the ground. Pink and blue clouds float in the sky around the boy.\nThe camera slowly zooms in. The boy is falling faster and faster, becoming smaller and smaller in the frame. A small pink cloud on the left side of the frame is also falling quickly. To the lower left of the boy, another child is also falling quickly. The waterfall in the background becomes clearer, and there is a blue river in the forest below.\nThe camera switches to a dense canopy, shot from bottom to top. Dark green leaves occupy the entire frame, with sunlight filtering through the gaps. Suddenly, a figure quickly falls through the leaves.\nThe camera switches again. There is a blue river in the picture, with branches and leaves above the river, lush green forests and a waterfall in the distance. 
On the right side of the picture, the little boy and another child quickly fall into the river, stirring up two huge splashes of water.", "events": [ { "event": "The video begins with a bird's-eye view of a little boy in a loose grey robe. The boy is suspended in the air, facing the camera, with his arms and legs spread out. In the background is a green forest and a waterfall on the ground. Pink and blue clouds float in the sky around the boy.", "visual_elements": [ { "content": "The camera is shooting from above a little boy in a loose grey robe.", "type": "camera", "weight": 3 }, { "content": "The little boy is suspended in the air, facing the camera.", "type": "camera", "weight": 3 }, { "content": "The little boy has his arms and legs spread out.", "type": "attribute", "weight": 2 }, { "content": "The background is a green forest on the ground and a waterfall.", "type": "scene", "weight": 2 }, { "content": "Pink and blue clouds are floating in the sky around the little boy.", "type": "scene", "weight": 2 } ] }, { "event": "The camera slowly zooms in. The boy is falling faster and faster, becoming smaller and smaller in the frame. A small pink cloud on the left side of the frame is also falling quickly. To the lower left of the boy, another child is also falling quickly. The waterfall in the background becomes clearer, and there is a blue river in the forest below.", "visual_elements": [ { "content": "The camera slowly zooms in.", "type": "camera", "weight": 3 }, { "content": "The little boy is falling faster and faster.", "type": "action", "weight": 3 }, { "content": "A small pink cloud is also falling quickly on the left side of the screen.", "type": "action", "weight": 1 }, { "content": "Another child is also falling quickly to the lower left of the little boy.", "type": "action", "weight": 3 }, { "content": "The waterfall in the background is clearer, and there is a blue river in the forest below.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to a dense canopy, shot from bottom to top. Dark green leaves occupy the entire frame, with sunlight filtering through the gaps. Suddenly, a figure quickly falls through the leaves.", "visual_elements": [ { "content": "The camera switches to a dense canopy, shooting from the bottom up.", "type": "camera", "weight": 3 }, { "content": "Dark green leaves occupy the entire screen, and sunlight shines through the gaps.", "type": "camera", "weight": 3 }, { "content": "Suddenly, a figure falls quickly through the leaves.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches again. There is a blue river in the picture, with branches and leaves above the river, lush green forests and a waterfall in the distance. 
On the right side of the picture, the little boy and another child quickly fall into the river, stirring up two huge splashes of water.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "There is a blue river in the picture.", "type": "camera", "weight": 3 }, { "content": "There are branches and leaves above the river.", "type": "scene", "weight": 2 }, { "content": "In the distance are lush green forests and a waterfall.", "type": "scene", "weight": 2 }, { "content": "On the right side of the picture, the little boy and another child quickly fall into the river.", "type": "action", "weight": 3 }, { "content": "The little boy and another child create two huge splashes of water, splashing everywhere.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0033", "video_path": "DREAM-1K/243.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 9.75, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera shows a vast meadow with tall grass and some colorful flowers. There are some white clouds in the sky and some tall trees in the distance. There is a group of people on the left side of the screen, mainly some children and adults. They are wearing colorful clothes.\nOn the left side of the picture, there is a little girl with long hair wearing a purple dress. The camera gradually moves to the lower right, and the little girl runs towards the camera on the grass with brisk movements. In the crowd behind her, some children are chasing each other and playing.\nThen, the camera switches to a close-up shot of an adult man and an adult woman. The man is on the left side of the screen, wearing a blue shirt and a white inner layer. The woman is on the right side of the screen, wearing a white dress and a transparent shawl. The man puts his hands on his chest, with his left hand on top of his right hand, looks straight ahead, and then smiles. The woman tilts her head to her right, looks at the man, and takes a step forward.\nThe camera switches to a closer view of the little girl on the grass. The little girl continues to run towards the camera on the grass, and the camera gradually moves down following her movements. She runs a few steps and then stops, standing on the grass. The little girl leans forward, with her arms bent, looking forward and grinning.", "events": [ { "event": "At the beginning of the video, the camera shows a vast meadow with tall grass and some colorful flowers. There are some white clouds in the sky and some tall trees in the distance. There is a group of people on the left side of the screen, mainly some children and adults. 
They are wearing colorful clothes.", "visual_elements": [ { "content": "The camera shows a vast meadow.", "type": "camera", "weight": 3 }, { "content": "The grass on the meadow is relatively tall, with some colorful flowers.", "type": "attribute", "weight": 2 }, { "content": "There are some white clouds floating in the sky, and some tall trees in the distance.", "type": "scene", "weight": 2 }, { "content": "There is a group of people on the left side of the screen, far away from the camera, mainly some children and adults.", "type": "scene", "weight": 2 }, { "content": "A group of people are wearing colorful clothes.", "type": "attribute", "weight": 1 } ] }, { "event": "On the left side of the picture, there is a little girl with long hair wearing a purple dress. The camera gradually moves to the lower right, and the little girl runs towards the camera on the grass with brisk movements. In the crowd behind her, some children are chasing each other and playing.", "visual_elements": [ { "content": "There is a little girl with long hair wearing a purple dress on the left side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The camera gradually moves to the lower right.", "type": "camera", "weight": 3 }, { "content": "The little girl is running towards the camera on the grass.", "type": "action", "weight": 3 }, { "content": "The little girl moves briskly.", "type": "attribute", "weight": 2 }, { "content": "There are some children chasing each other behind the little girl.", "type": "action", "weight": 1 } ] }, { "event": "Then, the camera switches to a close-up shot of an adult man and an adult woman. The man is on the left side of the screen, wearing a blue shirt and a white inner layer. The woman is on the right side of the screen, wearing a white dress and a transparent shawl. The man puts his hands on his chest, with his left hand on top of his right hand, looks straight ahead, and then smiles. The woman tilts her head to her right, looks at the man, and takes a step forward.", "visual_elements": [ { "content": "The camera switches to a close-up of an adult man and an adult woman.", "type": "camera", "weight": 3 }, { "content": "The man stands on the left side of the screen, wearing a blue shirt and a white inner layer.", "type": "attribute", "weight": 2 }, { "content": "The woman stands on the right side of the screen, wearing a white dress and a transparent shawl.", "type": "attribute", "weight": 2 }, { "content": "The man puts his hands on his chest, with his left hand on top of his right hand.", "type": "action", "weight": 3 }, { "content": "The man looks straight ahead and then smiles.", "type": "action", "weight": 3 }, { "content": "The woman tilts her head to her right and looks at the man.", "type": "action", "weight": 2 }, { "content": "The woman takes a step forward at the same time.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to a closer view of the little girl on the grass. The little girl continues to run towards the camera on the grass, and the camera gradually moves down following her movements. She runs a few steps and then stops, standing on the grass. 
The little girl leans forward, with her arms bent, looking forward and grinning.", "visual_elements": [ { "content": "The camera switches to a closer view of the little girl on the grass.", "type": "camera", "weight": 3 }, { "content": "The little girl continues to run towards the camera on the grass.", "type": "action", "weight": 2 }, { "content": "The camera gradually moves down following the little girl's movements.", "type": "camera", "weight": 3 }, { "content": "The little girl stops after running a few steps and stands on the grass.", "type": "action", "weight": 3 }, { "content": "The little girl leans forward with her arms bent.", "type": "action", "weight": 3 }, { "content": "The little girl looks ahead and smiles.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 23 }, { "index": "TUNA_0034", "video_path": "DREAM-1K/245.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Film", "duration": 9.25, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a man wearing a brown coat, blue jeans and a gray knitted hat. He lies on the ground with his left face facing the camera, his eyes a little confused. There is some white snow scattered around him.\nThen, the man uses his left hand to support his body, slowly raises his head and upper body, and looks forward. The camera continues to shoot from his left side and moves up to follow his movements.\nThe camera switches to a close-up and moves upwards. There is a hole in a brick wall, and the light is bright outside. Finally, the camera returns to the man again and moves upwards with his movements. He finally stands up with his hands hanging down. There is snow on the man's hat. The background is the interior of an old house, and light shines in from the broken roof.", "events": [ { "event": "At the beginning of the video, the camera focuses on a man wearing a brown coat, blue jeans and a gray knitted hat. He lies on the ground with his left face facing the camera, his eyes a little confused. There is some white snow scattered around him.", "visual_elements": [ { "content": "The camera focuses on a man.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a brown coat, blue jeans and a gray knitted hat.", "type": "attribute", "weight": 2 }, { "content": "The man lies on the ground with his left face facing the camera.", "type": "attribute", "weight": 2 }, { "content": "The man's eyes look a little confused.", "type": "attribute", "weight": 1 }, { "content": "There is some white snow scattered around the man.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the man uses his left hand to support his body, slowly raises his head and upper body, and looks forward. The camera continues to shoot from his left side and moves up to follow his movements.", "visual_elements": [ { "content": "The man uses his left hand to support his body.", "type": "action", "weight": 3 }, { "content": "The man slowly raises his head and upper body and looks forward.", "type": "action", "weight": 3 }, { "content": "The camera continues to shoot from his left side and moves up to follow his movements.", "type": "camera", "weight": 3 } ] }, { "event": "The camera switches to a close-up and moves upwards. There is a hole in a brick wall, and the light is bright outside. Finally, the camera returns to the man again and moves upwards with his movements. He finally stands up with his hands hanging down. 
There is snow on the man's hat. The background is the interior of an old house, and light shines in from the broken roof.", "visual_elements": [ { "content": "The camera switches to a close-up and moves upwards.", "type": "camera", "weight": 3 }, { "content": "There is a hole in a brick wall.", "type": "camera", "weight": 3 }, { "content": "The light is bright outside the hole.", "type": "attribute", "weight": 1 }, { "content": "The camera returns to the man again and moves upwards following his movements.", "type": "camera", "weight": 3 }, { "content": "The man finally stands up with his hands hanging down.", "type": "action", "weight": 3 }, { "content": "There is snow on the man's hat.", "type": "attribute", "weight": 1 }, { "content": "The background is blurred.", "type": "attribute", "weight": 2 }, { "content": "The background is the interior of an old house.", "type": "scene", "weight": 2 }, { "content": "The light shines in from the broken roof.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0035", "video_path": "DREAM-1K/246.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 7.0, "resolution": { "width": 1920, "height": 816 }, "caption": "The video begins as the camera closes in on a young woman. She has long blonde hair and wears a deep blue coat with a zip. The background is blurred. The camera gradually zooms in and shows the woman’s hair fluttering on her back. At the same time, drops of blood flow down from her left nostril. Light illuminates the woman’s face.\nNext, the camera switches to a wider scene. In the background, there is a brightly lit building with an open door on the right side of the screen. There are several trees in front of the building and a black car is parked there. The woman is on the right edge of the screen. Her hands stretch to both sides, and her left knee bends, creating a huge flame wall forward. The color of the flame changes from blue on the right side of the screen to orange on the left side of the screen. Her body leans forward slightly, and she looks very focused. The flame is dancing towards the camera.\nThe camera switches perspective again. There is a muscular man on the right side of the screen. He stands at the front edge of the flame with his back to the camera, staring ahead. The flame wall whirls to the sky and gradually burns more violently. Its blue edge disappears and at the same time the interior of the flame changes to a deeper color that is more similar to the color of the naked flame. The flame is spreading towards the man and almost encircles him.", "events": [ { "event": "The video begins as the camera closes in on a young woman. She has long blonde hair and wears a deep blue coat with a zip. The background is blurred. The camera gradually zooms in and shows the woman’s hair fluttering on her back. At the same time, drops of blood flow down from her left nostril. 
Light illuminates the woman’s face.", "visual_elements": [ { "content": "The camera closes in on a young woman.", "type": "camera", "weight": 3 }, { "content": "She has long blonde hair and wears a deep blue coat with a zip.", "type": "attribute", "weight": 2 }, { "content": "The background is blurred.", "type": "scene", "weight": 2 }, { "content": "The camera gradually zooms in.", "type": "camera", "weight": 3 }, { "content": "The woman's hair is fluttering on her back.", "type": "action", "weight": 1 }, { "content": "At the same time, drops of blood flow down from the woman's left nostril.", "type": "action", "weight": 3 }, { "content": "Light illuminates the woman's face.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera switches to a wider scene. In the background, there is a brightly lit building with an open door on the right side of the screen. There are several trees in front of the building and a black car is parked there. The woman is on the right edge of the screen. Her hands stretch to both sides, and her left knee bends, creating a huge flame wall forward. The color of the flame changes from blue on the right side of the screen to orange on the left side of the screen. Her body leans forward slightly, and she looks very focused. The flame is dancing towards the camera.", "visual_elements": [ { "content": "The camera switches to a wider scene.", "type": "camera", "weight": 3 }, { "content": "In the background, there is a brightly lit building with an open door on the right side of the screen. There are several trees in front of the building and a black car is parked there.", "type": "scene", "weight": 2 }, { "content": "The woman is on the right edge of the screen.", "type": "camera", "weight": 3 }, { "content": "The woman's hands stretch to both sides, and her left knee bends, creating a huge flame wall forward.", "type": "action", "weight": 3 }, { "content": "The color of the flame changes from blue on the right side of the screen to orange on the left side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The woman's body leans forward slightly.", "type": "action", "weight": 1 }, { "content": "The woman looks very focused.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches perspective again. There is a muscular man on the right side of the screen. He stands at the front edge of the flame with his back to the camera, staring ahead. The flame wall whirls to the sky and gradually burns more violently. Its blue edge disappears and at the same time the interior of the flame changes to a deeper color that is more similar to the color of the naked flame. The flame is spreading towards the man and almost encircles him.", "visual_elements": [ { "content": "The camera switches perspective again. 
There is a muscular man on the right side of the screen.", "type": "camera", "weight": 3 }, { "content": "There are trees on the left side of the screen.", "type": "scene", "weight": 1 }, { "content": "The man has his back to the camera.", "type": "camera", "weight": 3 }, { "content": "The man stands at the front edge of the flame, staring ahead.", "type": "action", "weight": 3 }, { "content": "The camera moves upwards slightly.", "type": "camera", "weight": 3 }, { "content": "The flame wall whirls to the sky and gradually burns more violently.", "type": "action", "weight": 3 }, { "content": "Its blue edge disappears and at the same time the interior of the flame changes to a deeper color that is more similar to the color of the naked flame.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0036", "video_path": "DREAM-1K/250.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 9.88, "resolution": { "width": 1920, "height": 800 }, "caption": "The camera zooms in on the front of a young woman and shakes slightly at the same time. The woman has long dark hair and she wears a light coat. The woman stands on the left side of the screen, looking straight ahead and talking. There is a brown wooden shutter behind the woman, and there are beaches, blue sea and cloudy blue sky outside the window.\nThen, the camera switches to the side of a young man and shakes slightly at the same time. He has short hair and wears a dark shirt. He raises his right eyebrow, then looks up to his right and looks at the camera. Behind the man are some green plants.\nThen, the camera switches to a spacious bedroom. There is a black cabinet on the left side of the bedroom. In the center of the bedroom is a big bed covered with a white sheet. There is a desk in front of the bed with a suitcase on it. There are some artworks and furniture in the bedroom. The man appears in the room from the left side of the screen rapidly, then he moves quickly and puts the things in the cupboard into the suitcase.\nThe camera switches to the right side of the screen again, and the woman puts her hands behind her back, leans against the brown door frame and watches the man's movements. Suddenly, the man runs from the left side of the screen to the right side of the screen in a rush. The man holds a pile of clothes in his right hand and takes things out of the suitcase with his left hand.\nThe camera quickly switches to the suitcase, and the man takes away the clothes in the suitcase with one hand rapidly. The camera quickly switches back to the close-up of the woman. The woman looks ahead, frowning at the same time. The camera switches quickly again, and the man takes the clothes from the bedroom to another room on the right side of the screen. The camera switches to an angle closer to the ground, and the woman in the foreground on the right side of the screen wears white shorts. There is a chair on the left of the woman, and a light-colored dress is on the back of the chair. The man picks up an object on the ground.\nThe camera finally switches to the front of the woman. The woman leans against the door frame and looks to her right. The man picks up the item and runs quickly to the chair.", "events": [ { "event": "The camera zooms in on the front of a young woman and shakes slightly at the same time. The woman has long dark hair and she wears a light coat. 
The woman stands on the left side of the screen, looking straight ahead and talking. There is a brown wooden shutter behind the woman, and there are beaches, blue sea and cloudy blue sky outside the window.", "visual_elements": [ { "content": "The camera zooms in on the front of a young woman and shakes slightly at the same time.", "type": "camera", "weight": 3 }, { "content": "The woman has long dark hair and she wears a light coat.", "type": "attribute", "weight": 2 }, { "content": "The woman stands on the left side of the screen, looking straight ahead and talking.", "type": "action", "weight": 3 }, { "content": "There is a brown wooden shutter behind the woman.", "type": "scene", "weight": 2 }, { "content": "Beaches, blue sea and cloudy blue sky can be seen outside the window.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to the side of a young man and shakes slightly at the same time. He has short hair and wears a dark shirt. He raises his right eyebrow, then looks up to his right and looks at the camera. Behind the man are some green plants.", "visual_elements": [ { "content": "The camera switches to the side of a young man and shakes slightly at the same time.", "type": "camera", "weight": 3 }, { "content": "The man has short hair and wears a dark shirt.", "type": "attribute", "weight": 2 }, { "content": "The man raises his right eyebrow, then looks up to his right and looks at the camera.", "type": "action", "weight": 3 }, { "content": "Behind the man are some green plants.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to a spacious bedroom. There is a black cabinet on the left side of the bedroom. In the center of the bedroom is a big bed covered with a white sheet. There is a desk in front of the bed with a suitcase on it. There are some artworks and furniture in the bedroom. The man appears in the room from the left side of the screen rapidly, then he moves quickly and puts the things in the cupboard into the suitcase.", "visual_elements": [ { "content": "The camera switches to a spacious bedroom.", "type": "camera", "weight": 3 }, { "content": "There is a black cabinet on the left side of the bedroom.", "type": "scene", "weight": 1 }, { "content": "In the center of the bedroom is a big bed covered with a white sheet.", "type": "scene", "weight": 2 }, { "content": "There is a desk in front of the bed with a suitcase on it.", "type": "scene", "weight": 1 }, { "content": "There are some artworks and furniture in the bedroom.", "type": "scene", "weight": 1 }, { "content": "The man appears in the room from the left side of the screen rapidly, and moves quickly.", "type": "action", "weight": 3 }, { "content": "The man puts the things in the cupboard into the suitcase.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the right side of the screen again, and the woman puts her hands behind her back, leans against the brown door frame and watches the man's movements. Suddenly, the man runs from the left side of the screen to the right side of the screen in a rush. 
The man holds a pile of clothes in his right hand and takes things out of the suitcase with his left hand.", "visual_elements": [ { "content": "The camera switches perspective again.", "type": "camera", "weight": 3 }, { "content": "On the right side of the screen, the woman puts her hands behind her back, leans against the brown door frame and watches the man's movements.", "type": "action", "weight": 3 }, { "content": "Suddenly, the man runs from the left side of the screen to the right side of the screen in a rush.", "type": "action", "weight": 2 }, { "content": "The man holds a pile of clothes in his right hand and takes things out of the suitcase with his left hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera quickly switches to the suitcase, and the man takes away the clothes in the suitcase with one hand rapidly. The camera quickly switches back to the close-up of the woman. The woman looks ahead, frowning at the same time. The camera switches quickly again, and the man takes the clothes from the bedroom to another room on the right side of the screen. The camera switches to an angle closer to the ground, and the woman in the foreground on the right side of the screen wears white shorts. There is a chair on the left of the woman, and a light-colored dress is on the back of the chair. The man picks up an object on the ground.", "visual_elements": [ { "content": "The camera quickly switches to the suitcase, and the man takes away the clothes in the suitcase with one hand rapidly.", "type": "camera", "weight": 3 }, { "content": "The camera quickly switches back to the close-up of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman looks ahead, frowning at the same time.", "type": "action", "weight": 3 }, { "content": "The camera switches quickly again, and the man takes the clothes from the bedroom to another room on the right side of the screen.", "type": "camera", "weight": 3 }, { "content": "The camera switches to an angle closer to the ground.", "type": "camera", "weight": 3 }, { "content": "The woman in the foreground on the right side of the screen wears white shorts.", "type": "attribute", "weight": 3 }, { "content": "There is a chair on the left of the woman, and a light-colored dress is on the back of the chair.", "type": "scene", "weight": 1 }, { "content": "The man picks up an object on the ground.", "type": "action", "weight": 3 } ] }, { "event": "The camera finally switches to the front of the woman. The woman leans against the door frame and looks to her right. The man picks up the item and runs quickly to the chair.", "visual_elements": [ { "content": "The camera finally switches to the front of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman leans against the door frame and looks to her right.", "type": "action", "weight": 3 }, { "content": "The man picks up the item and runs quickly to the chair.", "type": "action", "weight": 3 } ] } ], "n_events": 6, "n_elements": 31 }, { "index": "TUNA_0037", "video_path": "DREAM-1K/251.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 10.0, "resolution": { "width": 1920, "height": 816 }, "caption": "The video begins as the camera zooms in on the hands of a young woman. She unzips a beige handbag with her left hand and then rummages through the bag to look for something with her left hand. She has a flip phone in her right hand. 
The background light is dim.\nThe camera follows the movements of the woman's hands, and she quickly pulls out a long black line from the bag. Then the camera switches to a farther angle, showing that she bends and kneels on the ground with her legs. She has long black hair and wears a light skirt with suspenders. The woman then puts her hands on the ground and prepares to crawl. In the background, there is a row of cabinets on the left side of the woman, and some items are cluttered on the cabinet.\nThe camera switches again and closes up to the woman's hands. The woman's long hair hangs down to her arms. The camera goes across the table and captures the woman continuing to crawl forward.\nThe camera switches to the woman's hands. The woman holds a flip phone in her right hand and opens the flip phone up with her left hand. Then she sticks the back of the phone on a metal box on the brick wall with her right hand and brushes it down. Finally, the camera switches to a far angle from the woman and moves to the lower right, showing that the woman is aiming something at the bottom of the mobile phone with her right hand.", "events": [ { "event": "The video begins as the camera zooms in on the hands of a young woman. She unzips a beige handbag with her left hand and then rummages through the bag to look for something with her left hand. She has a flip phone in her right hand. The background light is dim.", "visual_elements": [ { "content": "The camera zooms in on the hands of a young woman.", "type": "camera", "weight": 3 }, { "content": "The woman unzips a beige handbag with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman rummages through the bag to look for something with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman has a flip phone in her right hand.", "type": "action", "weight": 3 }, { "content": "The background light is dim.", "type": "scene", "weight": 2 } ] }, { "event": "The camera follows the movements of the woman's hands, and she quickly pulls out a long black line from the bag. Then the camera switches to a farther angle, showing that she bends and kneels on the ground with her legs. She has long black hair and wears a light skirt with suspenders. The woman then puts her hands on the ground and prepares to crawl. In the background, there is a row of cabinets on the left side of the woman, and some items are cluttered on the cabinet.", "visual_elements": [ { "content": "The camera follows the movements of the woman's hands.", "type": "camera", "weight": 3 }, { "content": "The woman quickly pulls out a long black line from the bag.", "type": "action", "weight": 3 }, { "content": "The camera switches to a farther angle.", "type": "camera", "weight": 3 }, { "content": "The camera shows that the woman bends and kneels on the ground with her legs.", "type": "action", "weight": 3 }, { "content": "The woman has long black hair and wears a light skirt with suspenders.", "type": "attribute", "weight": 2 }, { "content": "The woman puts her hands on the ground and prepares to crawl.", "type": "action", "weight": 3 }, { "content": "There is a row of cabinets on the left side of the woman, and some items are cluttered on the cabinet.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches again and closes up to the woman's hands. The woman's long hair hangs down to her arms. 
The camera goes across the table and captures the woman continuing to crawl forward.", "visual_elements": [ { "content": "The camera switches again and closes up to the woman's hands.", "type": "camera", "weight": 3 }, { "content": "The woman's long hair hangs down to her arms.", "type": "attribute", "weight": 1 }, { "content": "The camera goes across the table and captures the woman continuing to crawl forward.", "type": "camera", "weight": 3 } ] }, { "event": "The camera switches to the woman's hands. The woman holds a flip phone in her right hand and opens the flip phone up with her left hand. Then she sticks the back of the phone on a metal box on the brick wall with her right hand and brushes it down. Finally, the camera switches to a far angle from the woman and moves to the lower right, showing that the woman is aiming something at the bottom of the mobile phone with her right hand.", "visual_elements": [ { "content": "The camera switches to the woman's hands.", "type": "camera", "weight": 3 }, { "content": "The woman holds a flip phone in her right hand.", "type": "action", "weight": 2 }, { "content": "The woman opens the flip phone up with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman sticks the back of the phone on a metal box on the brick wall with her right hand and brushes it down.", "type": "action", "weight": 3 }, { "content": "Finally, the camera switches to a far angle from the woman and moves to the lower right.", "type": "camera", "weight": 3 }, { "content": "The camera shows that the woman is aiming something at the bottom of the mobile phone with her right hand.", "type": "action", "weight": 1 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0038", "video_path": "DREAM-1K/257.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 4.25, "resolution": { "width": 1904, "height": 1024 }, "caption": "The video begins as the camera focuses on a young man in a white shirt. The camera shoots from the man's right side. It is night, and the scene is shot outdoors on dirt ground. The man sits in a chair and wears decorations with colorful feathers. He has a dark cup in his left hand and wears a watch on his left wrist. He looks up and drinks from the cup. The camera then moves slightly to the upper left. A young woman sits on the man's right. An elderly woman behind them on the left is clapping, and the background is blurred.\nNext, the camera switches to a wider perspective, showing the man and the woman sitting together. The man is on the right side of the screen. He smiles and faces his left side, raises the cup, and then turns to his right side to look ahead. The woman is on the left side of the screen. She wears a white T-shirt, touching her left hand with her right hand and looking to her left. There is a four-wheeled vehicle in the background.\nThe camera switches again, and an elderly man stands on a high platform. He wears a light-colored shirt and a wide-brimmed hat, facing a young man and a young woman. He has a dark cup in his right hand and raises it once. There are some colorful curtains hanging in the background. Several women are behind the old man, one of whom is holding a transparent glass bottle.\nThe camera shifts back to the young man and the young woman. The man smiles heartily, puts the cup close to his mouth, and seems about to continue drinking from the cup. 
The woman looks at him and also smiles heartily.\nFinally, the camera switches to the close-up of the old man. He has the cup in his right hand, holds the edge of the bottom of the cup in his left hand, and looks down to the front. Five adults appear in the background. They also smile and look down to the front.", "events": [ { "event": "The video begins as the camera focuses on a young man in a white shirt. The camera shoots from the man's right side. It is night, and the scene is shot outdoors on dirt ground. The man sits in a chair and wears decorations with colorful feathers. He has a dark cup in his left hand and wears a watch on his left wrist. He looks up and drinks from the cup. The camera then moves slightly to the upper left. A young woman sits on the man's right. An elderly woman behind them on the left is clapping, and the background is blurred.", "visual_elements": [ { "content": "The camera focuses on a young man in a white shirt, and shoots from the man's right side.", "type": "camera", "weight": 3 }, { "content": "It is night, and the scene is shot outdoors on dirt ground.", "type": "scene", "weight": 2 }, { "content": "The man sits in a chair and wears decorations with colorful feathers.", "type": "attribute", "weight": 2 }, { "content": "The man has a dark cup in his left hand.", "type": "action", "weight": 3 }, { "content": "The man wears a watch on his left wrist.", "type": "attribute", "weight": 2 }, { "content": "The man looks up and drinks from the cup.", "type": "action", "weight": 3 }, { "content": "The camera then moves slightly to the upper left.", "type": "camera", "weight": 3 }, { "content": "A young woman sits on the man's right.", "type": "scene", "weight": 2 }, { "content": "An elderly woman behind them on the left is clapping.", "type": "scene", "weight": 2 }, { "content": "The background is blurred.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera switches to a wider perspective, showing the man and the woman sitting together. The man is on the right side of the screen. He smiles and faces his left side, raises the cup, and then turns to his right side to look ahead. The woman is on the left side of the screen. She wears a white T-shirt, touching her left hand with her right hand and looking to her left. There is a four-wheeled vehicle in the background.", "visual_elements": [ { "content": "The camera switches to a wider perspective, showing the man and the woman sitting together.", "type": "camera", "weight": 3 }, { "content": "The man is on the right side of the screen.", "type": "camera", "weight": 3 }, { "content": "The man smiles, faces his left side and raises the cup.", "type": "action", "weight": 3 }, { "content": "The man turns to his right side to look ahead.", "type": "action", "weight": 3 }, { "content": "The woman is on the left side of the screen.", "type": "camera", "weight": 3 }, { "content": "The woman wears a white T-shirt.", "type": "attribute", "weight": 2 }, { "content": "The woman touches her left hand with her right hand and looks to her left.", "type": "action", "weight": 3 }, { "content": "There is a four-wheeled vehicle in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches again, and an elderly man stands on a high platform. He wears a light-colored shirt and a wide-brimmed hat, facing a young man and a young woman. He has a dark cup in his right hand and raises it once. There are some colorful curtains hanging in the background. 
Several women are behind the old man, one of whom is holding a transparent glass bottle.", "visual_elements": [ { "content": "The camera switches again. An elderly man stands on a high platform.", "type": "camera", "weight": 3 }, { "content": "The elderly man wears a light-colored shirt and a wide-brimmed hat, facing a young man and a young woman.", "type": "attribute", "weight": 2 }, { "content": "The elderly man has a dark cup in his right hand and raises it once.", "type": "action", "weight": 3 }, { "content": "There are some colorful curtains hanging in the background.", "type": "scene", "weight": 2 }, { "content": "Several women are behind the old man.", "type": "scene", "weight": 2 }, { "content": "One of the women is holding a transparent glass bottle.", "type": "action", "weight": 1 } ] }, { "event": "The camera shifts back to the young man and the young woman. The man smiles heartily, puts the cup close to his mouth, and seems about to continue drinking from the cup. The woman looks at him and also smiles heartily.", "visual_elements": [ { "content": "The camera shifts back to the young man and the young woman.", "type": "camera", "weight": 3 }, { "content": "The man smiles heartily, puts the cup close to his mouth.", "type": "action", "weight": 3 }, { "content": "The woman looks at him and also smiles heartily.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera switches to the close-up of the old man. He has the cup in his right hand, holds the edge of the bottom of the cup in his left hand, and looks down to the front. Five adults appear in the background. They also smile and look down to the front.", "visual_elements": [ { "content": "Finally, the camera switches to the close-up of the old man.", "type": "camera", "weight": 3 }, { "content": "The elderly man has the cup in his right hand, holds the edge of the bottom of the cup in his left hand.", "type": "action", "weight": 3 }, { "content": "The elderly man looks down to the front.", "type": "action", "weight": 3 }, { "content": "Five adults appear in the background.", "type": "scene", "weight": 2 }, { "content": "The five adults also smile and look down to the front.", "type": "action", "weight": 1 } ] } ], "n_events": 5, "n_elements": 32 }, { "index": "TUNA_0039", "video_path": "DREAM-1K/258.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 6.5, "resolution": { "width": 1840, "height": 1024 }, "caption": "The video begins as the camera shakes slightly and shoots from the side of a middle-aged man lying on a hospital bed. He wears a white coat with dark blue spots, lying on a light blue pillow with his eyes closed. He has a transparent hose inserted in his right nostril. The background is blurred. The scene is the interior of a room, and the wall of the room is light yellow. A man on the left side of the hospital bed is looking at the man lying on the hospital bed.\nNext, the camera switches to the hospital corridor, showing a middle-aged man wearing a black jacket and dark pants walking towards the camera. The man in the black jacket holds a white bag with the words \"PINK'S\" printed on it in his left hand. On his right is a room number plate marked \"714\" on the wall.\nThe camera follows the man. He walks to the door of Room 714, turns his head to his right and looks into the room. In the room, the man on the hospital bed lies on his back with his eyes closed, and above his head is a long white light. 
There is a lower cabinet on the left side of the hospital bed with a white desk lamp and a paper cup on it. On the right side of the hospital bed are medical equipment and curtains. The man in the black jacket then turns his head to the left and looks out of the door.\nThen, the camera switches and follows the black jacket man to the left front of the screen. A young woman appears from the left side of the camera and walks towards the man. Behind the woman is a blue locker with some items on it. There is a medical device behind her on the left. The woman wears a police uniform, and her right arm is in plaster and fixed with a white sling. She smiles at the man. After seeing the woman, the man looks down at the bag in his hand and then looks at the woman again.", "events": [ { "event": "The video begins as the camera shakes slightly and shoots from the side of a middle-aged man lying on a hospital bed. He wears a white coat with dark blue spots, lying on a light blue pillow with his eyes closed. He has a transparent hose inserted in his right nostril. The background is blurred. The scene is the interior of a room, and the wall of the room is light yellow. A man on the left side of the hospital bed is looking at the man lying on the hospital bed.", "visual_elements": [ { "content": "The camera shakes slightly and shoots from the side of a middle-aged man lying on a hospital bed.", "type": "camera", "weight": 3 }, { "content": "The man wears a white coat with dark blue spots, lying on a light blue pillow with his eyes closed.", "type": "attribute", "weight": 2 }, { "content": "The man has a transparent hose inserted in his right nostril.", "type": "attribute", "weight": 2 }, { "content": "The background is blurred.", "type": "attribute", "weight": 2 }, { "content": "The scene is the interior of a room.", "type": "attribute", "weight": 2 }, { "content": "The wall of the room is light yellow.", "type": "scene", "weight": 2 }, { "content": "A man on the left side of the hospital bed is looking at the man lying on the hospital bed.", "type": "action", "weight": 1 } ] }, { "event": "Next, the camera switches to the hospital corridor, showing a middle-aged man wearing a black jacket and dark pants walking towards the camera. The man in the black jacket holds a white bag with the words \"PINK'S\" printed on it in his left hand. On his right is a room number plate marked \"714\" on the wall.", "visual_elements": [ { "content": "The camera switches to the hospital corridor.", "type": "camera", "weight": 3 }, { "content": "The camera shows a middle-aged man who wears a black jacket and dark pants walking towards the camera.", "type": "action", "weight": 3 }, { "content": "The man in the black jacket holds a white bag in his left hand.", "type": "action", "weight": 3 }, { "content": "The words \"PINK'S\" are printed on the bag.", "type": "attribute", "weight": 2 }, { "content": "On the right of the man in the black jacket is a room number plate marked \"714\" on the wall.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera follows the man. He walks to the door of Room 714, turns his head to his right and looks into the room. In the room, the man on the hospital bed lies on his back with his eyes closed, and above his head is a long white light. There is a lower cabinet on the left side of the hospital bed with a white desk lamp and a paper cup on it. On the right side of the hospital bed are medical equipment and curtains. 
The man in the black jacket then turns his head to the left and looks out of the door.", "visual_elements": [ { "content": "The camera follows the man in the black jacket.", "type": "camera", "weight": 3 }, { "content": "The man in the black jacket walks to the door of Room 714, turns his head to his right and looks into the room.", "type": "camera", "weight": 3 }, { "content": "In the room, the man on the hospital bed lies on his back with his eyes closed.", "type": "attribute", "weight": 2 }, { "content": "Above the head of the man on the hospital bed is a long white light.", "type": "scene", "weight": 2 }, { "content": "There is a lower cabinet on the left side of the hospital bed with a white desk lamp and a paper cup on it.", "type": "scene", "weight": 2 }, { "content": "On the right side of the hospital bed are medical equipment and curtains.", "type": "scene", "weight": 2 }, { "content": "The man in the black jacket then turns his head to the left and looks out of the door.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera switches and follows the black jacket man to the left front of the screen. A young woman appears from the left side of the camera and walks towards the man. Behind the woman is a blue locker with some items on it. There is a medical device behind her on the left. The woman wears a police uniform, and her right arm is in plaster and fixed with a white sling. She smiles at the man. After seeing the woman, the man looks down at the bag in his hand and then looks at the woman again.", "visual_elements": [ { "content": "The camera switches and follows the black jacket man to the left front of the screen.", "type": "camera", "weight": 3 }, { "content": "A young woman appears from the left side of the camera and walks towards the man.", "type": "action", "weight": 3 }, { "content": "Behind the woman is a blue locker with some items on it.", "type": "scene", "weight": 2 }, { "content": "There is a medical device behind her on the left.", "type": "scene", "weight": 2 }, { "content": "The woman wears a police uniform, and her right arm is in plaster and fixed with a white sling.", "type": "attribute", "weight": 2 }, { "content": "The woman smiles at the man.", "type": "action", "weight": 3 }, { "content": "After seeing the woman, the man looks down at the bag in his hand and then looks at the woman again.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 26 }, { "index": "TUNA_0040", "video_path": "DREAM-1K/260.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 6.0, "resolution": { "width": 1840, "height": 1024 }, "caption": "The video begins as the camera zooms in on the front of a motorcycle rider. He rides on a motorcycle, wearing a blue and black motorcycle suit, a helmet and black gloves. The background is a modern bridge with many pedestrians on it. The sky is blue and the sun is shining. The rider waves his left hand to his left to signal the pedestrians in front to get out of the way. The motorcycle runs fast and the scene behind the rider quickly passes behind him.\nNext, the camera switches to a male pedestrian. He wears a brown coat and a blue shirt, holds a selfie stick in his left hand, and brushes his hair to his right with his right hand. He lowers his head and smiles at the selfie stick. There are many other pedestrians walking in the background with their backs to the camera. 
A female pedestrian with long hair is walking towards the male pedestrian.\nThen, the camera switches to a farther angle to shoot the male pedestrian, showing that he is standing in the middle of the road looking at the selfie stick. Suddenly, a motorcycle rider in red raises the front wheel of the motorcycle and runs straight towards the man at a high speed. A motorcycle rider in a blue and black motorcycle suit follows closely behind the motorcycle rider in red. In the background, a male pedestrian in a blue shirt and a hat is on the left side of the screen, and prepares to walk from one end of the bridge to the other. He suddenly stops to avoid the motorcycle. He turns to his right and looks forward.\nThe camera switches to the right side of a flight of stairs and zooms in gradually. It shows the two riders riding down from the top of the stairs one after the other. The pedestrians turn around one after another to avoid the motorcycles moving downwards. On the platform above the stairs is a brand shop named Restoration Hardware.", "events": [ { "event": "The video begins as the camera zooms in on the front of a motorcycle rider. He rides on a motorcycle, wearing a blue and black motorcycle suit, a helmet and black gloves. The background is a modern bridge with many pedestrians on it. The sky is blue and the sun is shining. The rider waves his left hand to his left to signal the pedestrians in front to get out of the way. The motorcycle runs fast and the scene behind the rider quickly passes behind him.", "visual_elements": [ { "content": "The video begins as the camera zooms in on the front of a motorcycle rider.", "type": "camera", "weight": 3 }, { "content": "The rider rides on a motorcycle, wearing a blue and black motorcycle suit.", "type": "attribute", "weight": 2 }, { "content": "The rider wears a helmet and black gloves.", "type": "attribute", "weight": 2 }, { "content": "The background is a modern bridge with many pedestrians on it.", "type": "scene", "weight": 3 }, { "content": "The sky is blue and the sun is shining.", "type": "scene", "weight": 2 }, { "content": "The rider waves his left hand to his left.", "type": "action", "weight": 3 }, { "content": "The motorcycle runs fast.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to a male pedestrian. He wears a brown coat and a blue shirt, holds a selfie stick in his left hand, and brushes his hair to his right with his right hand. He lowers his head and smiles at the selfie stick. There are many other pedestrians walking in the background with their backs to the camera. 
A female pedestrian with long hair is walking towards the male pedestrian.", "visual_elements": [ { "content": "The camera switches to a male pedestrian.", "type": "camera", "weight": 3 }, { "content": "The male pedestrian wears a brown coat and a blue shirt.", "type": "attribute", "weight": 2 }, { "content": "The male pedestrian holds a selfie stick in his left hand and brushes his hair to his right with his right hand.", "type": "action", "weight": 3 }, { "content": "The male pedestrian lowers his head and smiles at the selfie stick.", "type": "action", "weight": 3 }, { "content": "There are many other pedestrians walking in the background with their backs to the camera.", "type": "scene", "weight": 2 }, { "content": "A female pedestrian with long hair is walking towards the male pedestrian.", "type": "action", "weight": 1 } ] }, { "event": "Then, the camera switches to a farther angle to shoot the male pedestrian, showing that he is standing in the middle of the road looking at the selfie stick. Suddenly, a motorcycle rider in red raises the front wheel of the motorcycle and runs straight towards the man at a high speed. A motorcycle rider in a blue and black motorcycle suit follows closely behind the motorcycle rider in red. In the background, a male pedestrian in a blue shirt and a hat is on the left side of the screen, and prepares to walk from one end of the bridge to the other. He suddenly stops to avoid the motorcycle. He turns to his right and looks forward.", "visual_elements": [ { "content": "Then, the camera switches to a farther angle to shoot the male pedestrian.", "type": "camera", "weight": 3 }, { "content": "The camera shows that the male pedestrian is standing in the middle of the road looking at the selfie stick.", "type": "camera", "weight": 3 }, { "content": "Suddenly, a motorcycle rider in red raises the front wheel of the motorcycle and runs straight towards the man at a high speed.", "type": "action", "weight": 3 }, { "content": "A motorcycle rider in a blue and black motorcycle suit follows closely behind the motorcycle rider in red.", "type": "action", "weight": 3 }, { "content": "In the background, a male pedestrian in a blue shirt and a hat is on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "The male pedestrian in the blue shirt and a hat suddenly stops to avoid the motorcycle.", "type": "action", "weight": 1 }, { "content": "The male pedestrian in the blue shirt and a hat turns to his right and looks forward.", "type": "action", "weight": 1 } ] }, { "event": "The camera switches to the right side of a flight of stairs and zooms in gradually. It shows two riders riding down from the top of the stairs one after the other. The pedestrians turn around successively to avoid the motorcycles moving downwards. 
On the platform above the stairs is a brand shop named Restoration Hardware.", "visual_elements": [ { "content": "The camera switches to the right side of a flight of stairs and zooms in gradually.", "type": "camera", "weight": 3 }, { "content": "It shows two riders riding down from the top of the stairs one after the other.", "type": "action", "weight": 3 }, { "content": "The pedestrians on the stairs turn around successively to avoid the motorcycles moving downwards.", "type": "action", "weight": 2 }, { "content": "On the platform above the stairs is a brand shop named Restoration Hardware.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 24 }, { "index": "TUNA_0041", "video_path": "DREAM-1K/261.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 7.5, "resolution": { "width": 1840, "height": 1024 }, "caption": "At the beginning of the video, the camera shakes violently up and down and features a young man inside a car. He wears a hat and dark clothes and he is driving the car. Outside the window, the city streets and some buildings are flying past.\nThe camera then switches to the outside of the car and clearly aims at a street. On the street, a young man and a young woman are walking on the sidewalk with their backs to the camera. They prepare to walk across the road. The woman has long blond hair. She wears a colorful coat and black trousers. The man wears a red coat and black trousers. The camera swirls quickly to the right. In front of them, a black car quickly drives to the right of the screen, turning sharply in the direction of the camera. White smoke comes out from behind the car.\nThe camera switches to the front view of the car, showing the car turning sharply to the right of the screen on the road. The camera quickly rotates to the right following the car's action. White smoke continues pouring out behind the car. On both sides of the road are some buildings and tall palm trees. Not far behind the car, a white police car follows, and its warning lights are flashing.\nThen, the camera switches to the overhead viewing angle, showing the panoramic view of the street. The car continues to drive rapidly on the street. On the left side of the car is a sidewalk, and a pedestrian is crossing the road. The camera again switches to the street in a head-up angle and aims at the left side of the car. The camera moves back slightly, and the car continues to drive at a high speed, followed by the police car. Two pedestrians on the left side of the road hurry to avoid them.\nAs the camera rotates rapidly to the left, a female pedestrian hurriedly pulls the male pedestrian in front of her with her right hand. Then, the camera switches to the back of a truck, and the back door of the truck opens. There are some cartons filled with fruit in the front, and the words FRESH PRODUCE are printed on the cartons. There is a man in a hat, a light coat and light blue trousers at the door. He jumps to his left into the truck. The police car drives towards the man and crashes into the back of the truck.\nFinally, the camera zooms in on the logo and headlights of the police car, showing that the police car hits the back of the truck. The police car stops after a violent vibration of the front of the car.", "events": [ { "event": "At the beginning of the video, the camera shakes violently up and down and features a young man inside a car. He wears a hat and dark clothes and he is driving the car. 
Outside the window, the city streets and some buildings are flying past.", "visual_elements": [ { "content": "The camera shakes violently up and down and features a young man inside a car.", "type": "camera", "weight": 3 }, { "content": "The man wears a hat and dark clothes.", "type": "attribute", "weight": 2 }, { "content": "The man is driving the car.", "type": "action", "weight": 3 }, { "content": "Outside the window, the city streets and some buildings are flying past.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then switches to the outside of the car and clearly aims at a street. On the street, a young man and a young woman are walking on the sidewalk with their backs to the camera. They prepare to walk across the road. The woman has long blond hair. She wears a colorful coat and black trousers. The man wears a red coat and black trousers. The camera swirls quickly to the right. In front of them, a black car quickly drives to the right of the screen, turning sharply in the direction of the camera. White smoke comes out from behind the car.", "visual_elements": [ { "content": "The camera then switches to the outside of the car and clearly aims at a street.", "type": "camera", "weight": 3 }, { "content": "On the street, a young man and a young woman are walking on the sidewalk with their backs to the camera.", "type": "action", "weight": 3 }, { "content": "The woman has long blond hair. She wears a colorful coat and black trousers.", "type": "attribute", "weight": 2 }, { "content": "The man wears a red coat and black trousers.", "type": "attribute", "weight": 2 }, { "content": "The camera swirls quickly to the right.", "type": "camera", "weight": 3 }, { "content": "A black car quickly drives to the right of the screen, turning sharply in the direction of the camera.", "type": "action", "weight": 3 }, { "content": "White smoke comes out from behind the car.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to the front view of the car, showing the car turning sharply to the right of the screen on the road. The camera quickly rotates to the right following the car's action. White smoke continues pouring out behind the car. On both sides of the road are some buildings and tall palm trees. Not far behind the car, a white police car follows, and its warning lights are flashing.", "visual_elements": [ { "content": "The camera switches to the front view of the car, showing the car turning sharply to the right of the screen on the road.", "type": "camera", "weight": 3 }, { "content": "The camera quickly rotates to the right following the car's action.", "type": "camera", "weight": 3 }, { "content": "White smoke continues pouring out behind the car.", "type": "attribute", "weight": 1 }, { "content": "On both sides of the road are some buildings and tall palm trees.", "type": "scene", "weight": 2 }, { "content": "Not far behind the car, a white police car follows.", "type": "action", "weight": 3 }, { "content": "The warning lights of the police car are flashing.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera switches to the overhead viewing angle, showing the panoramic view of the street. The car continues to drive rapidly on the street. On the left side of the car is a sidewalk, and a pedestrian is crossing the road. The camera again switches to the street in a head-up angle and aims at the left side of the car. The camera moves back slightly, and the car continues to drive at a high speed, followed by the police car. 
Two pedestrians on the left side of the road hurry to avoid them.", "visual_elements": [ { "content": "The camera switches to the overhead viewing angle, showing the panoramic view of the street.", "type": "camera", "weight": 3 }, { "content": "The car continues to drive rapidly on the street.", "type": "action", "weight": 2 }, { "content": "On the left side of the car is a sidewalk.", "type": "scene", "weight": 2 }, { "content": "A pedestrian is crossing the road.", "type": "action", "weight": 1 }, { "content": "The camera again switches to the street in a head-up angle and aims at the left side of the car.", "type": "camera", "weight": 3 }, { "content": "The camera moves back slightly.", "type": "camera", "weight": 3 }, { "content": "The car continues to drive at a high speed.", "type": "action", "weight": 1 }, { "content": "The police car continues to follow the car from not far behind.", "type": "action", "weight": 2 }, { "content": "Two pedestrians on the left side of the road hurry to avoid them.", "type": "action", "weight": 1 } ] }, { "event": "As the camera rotates rapidly to the left, a female pedestrian hurriedly pulls the male pedestrian in front of her with her right hand. Then, the camera switches to the back of a truck, and the back door of the truck opens. There are some cartons filled with fruit in the front, and the words FRESH PRODUCE are printed on the cartons. There is a man in a hat, a light coat and light blue trousers at the door. He jumps to his left into the truck. The police car drives towards the man and crashes into the back of the truck.", "visual_elements": [ { "content": "As the camera rotates rapidly to the left, a female pedestrian hurriedly pulls the male pedestrian in front of her with her right hand.", "type": "camera", "weight": 3 }, { "content": "The camera switches to the back of a truck.", "type": "camera", "weight": 3 }, { "content": "The back door of the truck opens.", "type": "attribute", "weight": 2 }, { "content": "There are some cartons filled with fruit in the front of the truck.", "type": "scene", "weight": 2 }, { "content": "The words FRESH PRODUCE are printed on the cartons.", "type": "attribute", "weight": 1 }, { "content": "There is a man in a hat, a light coat and light blue trousers at the door.", "type": "attribute", "weight": 2 }, { "content": "The man in the light coat jumps to his left into the truck.", "type": "action", "weight": 3 }, { "content": "The police car drives towards the man in the light coat and crashes into the back of the truck.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera zooms in on the logo and headlights of the police car, showing that the police car hits the back of the truck. The police car stops after a violent vibration of the front of the car.", "visual_elements": [ { "content": "The camera zooms in on the logo and headlights of the police car.", "type": "camera", "weight": 3 }, { "content": "The police car hits the back of the truck.", "type": "action", "weight": 3 }, { "content": "The police car stops after a violent vibration of the front of the car.", "type": "action", "weight": 3 } ] } ], "n_events": 6, "n_elements": 37 }, { "index": "TUNA_0042", "video_path": "DREAM-1K/263.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 3.88, "resolution": { "width": 1840, "height": 1024 }, "caption": "The video begins as the camera zooms in on an empty road. The sky is full of clouds. 
On both sides of the road are desolate bushes and low hills. The camera shoots up from the ground, and the yellow lane line of the road can be seen clearly. A motorcycle on the right side of the camera is riding out of the screen in the direction of the camera rapidly. After that, a second motorcycle on the right side of the lane line quickly comes towards the camera from the distance. On the left side of the lane, a blue car is driving in the direction of the camera.\nThe camera switches to the front of a motorcycle rider. The rider is an elderly man with a short white beard. He wears a white helmet, a pair of black sunglasses, and a black leather jacket. He holds the handle of the motorcycle in his right hand and a black gun in the direction of the camera in his left hand. There are two road signs and bushes in the background, showing that he is driving on a remote road. The camera shakes and moves back slightly. The elderly rider looks forward to the left, takes aim and fires.\nThe camera switches to a long shot of the highway. Three motorcycles come from a distance, two of which are driving side by side in the front. On the right side of the lane line, there is a silver car driving with its back to the camera. The two motorcycle riders in the front wear dark riding clothes and helmets, and they are riding in a steady posture.\nSuddenly, on the left side of the road, a clump of leaves at the top of a shrub is knocked off and falls on the ground, stirring a lot of dust and debris. The two riders in the front of the road hurriedly bow their heads and turn to their left to avoid the falling debris.", "events": [ { "event": "The video begins as the camera zooms in on an empty road. The sky is full of clouds. On both sides of the road are desolate bushes and low hills. The camera shoots up from the ground, and the yellow lane line of the road can be seen clearly. A motorcycle on the right side of the camera is riding out of the screen in the direction of the camera rapidly. After that, a second motorcycle on the right side of the lane line quickly comes towards the camera from the distance. On the left side of the lane, a blue car is driving in the direction of the camera.", "visual_elements": [ { "content": "The camera zooms in on an empty road.", "type": "camera", "weight": 3 }, { "content": "The sky is full of clouds.", "type": "scene", "weight": 2 }, { "content": "On both sides of the road are desolate bushes and low hills.", "type": "scene", "weight": 2 }, { "content": "The camera shoots up from the ground.", "type": "camera", "weight": 3 }, { "content": "The yellow lane line of the road can be seen clearly.", "type": "attribute", "weight": 2 }, { "content": "A motorcycle on the right side of the camera is riding out of the screen in the direction of the camera rapidly.", "type": "action", "weight": 3 }, { "content": "A second motorcycle on the right side of the lane line quickly comes towards the camera from the distance.", "type": "action", "weight": 3 }, { "content": "On the left side of the lane, a blue car is driving in the direction of the camera.", "type": "action", "weight": 1 } ] }, { "event": "The camera switches to the front of a motorcycle rider. The rider is an elderly man with a short white beard. He wears a white helmet, a pair of black sunglasses, and a black leather jacket. He holds the handle of the motorcycle in his right hand and a black gun in the direction of the camera in his left hand. 
There are two road signs and bushes in the background, showing that he is driving on a remote road. The camera shakes and moves back slightly. The elderly rider looks forward to the left, takes aim and fires.", "visual_elements": [ { "content": "The camera switches to the front of a motorcycle rider.", "type": "camera", "weight": 3 }, { "content": "The rider is an elderly man with a short white beard.", "type": "attribute", "weight": 2 }, { "content": "The rider wears a white helmet, a pair of black sunglasses, and a black leather jacket.", "type": "attribute", "weight": 2 }, { "content": "The rider holds the handle of the motorcycle in his right hand and a black gun in the direction of the camera in his left hand.", "type": "attribute", "weight": 2 }, { "content": "The background is blurred.", "type": "attribute", "weight": 2 }, { "content": "There are two road signs and bushes in the background.", "type": "scene", "weight": 2 }, { "content": "The rider is driving on a remote road.", "type": "attribute", "weight": 2 }, { "content": "The camera shakes and moves back slightly.", "type": "camera", "weight": 3 }, { "content": "The elderly rider looks forward to the left, takes aim and fires.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to a long shot of the highway. Three motorcycles come from a distance, two of which are driving side by side in the front. On the right side of the lane line, there is a silver car driving with its back to the camera. The two motorcycle riders in the front wear dark riding clothes and helmets, and they are riding in a steady posture.", "visual_elements": [ { "content": "The camera switches to a long shot of the highway.", "type": "camera", "weight": 3 }, { "content": "Three motorcycles come from a distance.", "type": "action", "weight": 3 }, { "content": "Two of the three motorcycles are driving side by side in the front.", "type": "action", "weight": 3 }, { "content": "On the right side of the lane line, there is a silver car driving with its back to the camera.", "type": "action", "weight": 1 }, { "content": "The two motorcycle riders in the front wear dark riding clothes and helmets, and they are riding in a steady posture.", "type": "attribute", "weight": 2 } ] }, { "event": "Suddenly, on the left side of the road, a clump of leaves at the top of a shrub is knocked off and falls on the ground, stirring a lot of dust and debris. The two riders in the front of the road hurriedly bow their heads and turn to their left to avoid the falling debris.", "visual_elements": [ { "content": "On the side of the road, a clump of leaves at the top of a shrub is knocked off and falls on the ground, stirring a lot of dust and debris.", "type": "scene", "weight": 2 }, { "content": "The two riders in the front of the road hurriedly bow their heads and turn to their left to avoid the falling debris.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 24 }, { "index": "TUNA_0043", "video_path": "DREAM-1K/264.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 3.12, "resolution": { "width": 1840, "height": 1024 }, "caption": "The video begins as the camera zooms in on the close-up of the back of a person who wears dark green clothes. The speed of the movement of the camera is slow. Another person pushes his back from the right of his back with his hand, his fingers widely spread.\nNext, the camera switches to a close-up. 
A muscular arm reaches towards a young man's face. The hand makes a fist and hits the young man on the right side of his face. The speed of moving the camera is slow, and the face of the man who is hit shakes violently.\nThen, the camera switches to a wider angle of view, and the camera shakes and moves to the right. The speed of the camera movement returns to normal. On the screen, a man wearing a short-sleeved grey shirt punches another man wearing similar clothes with his right hand. Behind them, a middle-aged man in a dark blue coat hurriedly whistles to motion the attacker to stop. The beaten man turns his head to the right rear and falls to the ground. The background is a spacious indoor room with three large windows on the top edge of the wall. The light outside the window is bright. Some people around are watching.\nFinally, the camera switches to the close-up of the beaten man. The beaten man suddenly falls to the blue floor on his back and his body shakes to the right side. His face first turns to his right and then faces the ceiling directly above, and his eyes are closed.", "events": [ { "event": "The video begins as the camera zooms in on the close-up of the back of a person who wears dark green clothes. The speed of the movement of the camera is slow. Another person pushes his back from the right of his back with his hand, his fingers widely spread.", "visual_elements": [ { "content": "The camera zooms in on the close-up of the back of a person who wears dark green clothes. The speed of the movement of the camera is slow.", "type": "camera", "weight": 3 }, { "content": "Another person pushes his back from the right of his back with his hand, his fingers widely spread.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to a close-up. A muscular arm reaches towards a young man's face. The hand makes a fist and hits the young man on the right side of his face. The speed of moving the camera is slow, and the face of the man who is hit shakes violently.", "visual_elements": [ { "content": "Next, the camera switches to a close-up.", "type": "camera", "weight": 3 }, { "content": "A muscular arm reaches towards a young man's face. The hand makes a fist and hits the young man on the right side of his face.", "type": "action", "weight": 3 }, { "content": "The speed of moving the camera is slow.", "type": "camera", "weight": 3 }, { "content": "The face of the man who is hit shakes violently.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera switches to a wider angle of view, and the camera shakes and moves to the right. The speed of the camera movement returns to normal. On the screen, a man wearing a short-sleeved grey shirt punches another man wearing similar clothes with his right hand. Behind them, a middle-aged man in a dark blue coat hurriedly whistles to motion the attacker to stop. The beaten man turns his head to the right rear and falls to the ground. The background is a spacious indoor room with three large windows on the top edge of the wall. The light outside the window is bright. 
Some people around are watching.", "visual_elements": [ { "content": "The camera switches to a wider angle of view, and the camera shakes and moves to the right. The speed of the camera movement returns to normal.", "type": "camera", "weight": 3 }, { "content": "A man wearing a short-sleeved grey shirt punches another man wearing similar clothes with his right hand swiftly.", "type": "action", "weight": 3 }, { "content": "A middle-aged man in a dark blue coat hurriedly whistles to motion the attacker to stop.", "type": "action", "weight": 2 }, { "content": "The beaten man turns his head to the right rear and falls to the ground.", "type": "action", "weight": 3 }, { "content": "The background is a spacious indoor room.", "type": "scene", "weight": 2 }, { "content": "Three large windows are on the top edge of the wall.", "type": "attribute", "weight": 2 }, { "content": "The light outside the window is bright.", "type": "scene", "weight": 2 }, { "content": "Some people around are watching.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera switches to the close-up of the beaten man. The beaten man suddenly falls to the blue floor on his back and his body shakes to the right side. His face first turns to his right and then faces the ceiling directly above, and his eyes are closed.", "visual_elements": [ { "content": "Finally, the camera switches to the close-up of the beaten man.", "type": "camera", "weight": 3 }, { "content": "The beaten man suddenly falls to the blue floor.", "type": "action", "weight": 3 }, { "content": "The beaten man falls on his back and his body shakes to the right side.", "type": "action", "weight": 3 }, { "content": "The face of the beaten man first turns to his right and then faces the ceiling directly above.", "type": "action", "weight": 3 }, { "content": "The eyes of the beaten man are closed.", "type": "attribute", "weight": 1 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0044", "video_path": "DREAM-1K/266.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 13.12, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with the camera focusing sideways on a man on a horse, showing only a very small portion of the man's side face. The camera moves slowly. He wears a black long-sleeved coat and black trousers, with an armband on his left arm. The man rides a light-coloured horse and holds the reins in his left hand. The camera moves down as the man pulls back the reins with his left hand. A thick fog surrounds the area, leading to poor visibility.\nThe camera then switches to the back of the man, who is travelling forward on horseback. A male passer-by in dark clothes is in front of him. There are railings on both sides of the road and buildings are blurred in the distance. The camera then moves down to the lower left at a slow speed and shows that the passer-by is striding forwards over a bridge. The man on horseback is fractionally behind, holding an unfurled whip in his right hand.\nThe camera then switches to show the passer-by's front face at a normal speed. The passer-by is a black man, wearing a dark windbreaker and light trousers. He looks very scared and nervous. He runs fast in the direction of the camera. Behind the black man, the man on horseback is about to catch up with the black man. 
The road is a dirt road, with some white smoke on the right side of the dirt road drifting towards the left side of the frame. Some trees are on both sides. In the distance, there are thick clouds in the grey sky.\nThe camera switches again to the man on horseback, showing his face at a slow speed. He is an old man, wearing a light-coloured hat. He waves a flailing whip forward in his right hand. The camera switches to the right hand side of a group of people. They are a group of black people, four men and one woman. They are running towards the front. Behind them, the man on horseback raises his whip upwards.\nThe camera switches to the front of the people. The man on horseback catches up with a black man in a trench coat and is on the left of him. The man on horseback strikes his whip on the black man's back and the black man falls forward to the ground. To the left of the man on horseback, a black girl in a light-coloured dress is running in the direction of the camera. In front of the girl, two people lie on their sides on the ground.\nThe camera switches to the back of the people, showing the black man falling to the ground. Finally, the camera switches to the right side of the man on horseback. The man on horseback continues to ride forward. Behind him, the black man in the trench coat is lying head down on the ground. In the distance, several people fall to the ground and the black girl runs past the two men lying on their sides.", "events": [ { "event": "The video begins with the camera focusing sideways on a man on a horse, showing only a very small portion of the man's side face. The camera moves slowly. He wears a black long-sleeved coat and black trousers, with an armband on his left arm. The man rides a light-coloured horse and holds the reins in his left hand. The camera moves down as the man pulls back the reins with his left hand. A thick fog surrounds the area, leading to poor visibility.", "visual_elements": [ { "content": "The video begins with the camera focusing sideways on a man on a horse, showing only a very small portion of the man's side face.", "type": "camera", "weight": 3 }, { "content": "The camera moves slowly.", "type": "camera", "weight": 3 }, { "content": "The man wears a black long-sleeved coat and black trousers, with an armband on his left arm.", "type": "attribute", "weight": 2 }, { "content": "The man rides a light-coloured horse and holds the reins in his left hand.", "type": "attribute", "weight": 2 }, { "content": "The camera moves down.", "type": "camera", "weight": 3 }, { "content": "The man pulls back the reins with his left hand.", "type": "action", "weight": 3 }, { "content": "A thick fog surrounds the area, leading to poor visibility.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then switches to the back of the man, who is travelling forward on horseback. A male passer-by in dark clothes is in front of him. There are railings on both sides of the road and buildings are blurred in the distance. The camera then moves down to the lower left at a slow speed and shows that the passer-by is striding forwards over a bridge. 
The man on horseback is fractionally behind, holding an unfurled whip in his right hand.", "visual_elements": [ { "content": "The camera switches to the back of the man.", "type": "camera", "weight": 3 }, { "content": "The man is travelling forward on horseback.", "type": "action", "weight": 2 }, { "content": "A male passer-by in dark clothes is in front of the man on horseback.", "type": "action", "weight": 3 }, { "content": "There are railings on both sides of the road and buildings are blurred in the distance.", "type": "scene", "weight": 2 }, { "content": "The camera moves down to the lower left at a slow speed.", "type": "camera", "weight": 3 }, { "content": "The passer-by is striding forwards over a bridge.", "type": "action", "weight": 3 }, { "content": "The man on horseback is fractionally behind, holding an unfurled whip in his right hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera then switches to show the passer-by's front face at a normal speed. The passer-by is a black man, wearing a dark windbreaker and light trousers. He looks very scared and nervous. He runs fast in the direction of the camera. Behind the black man, the man on horseback is about to catch up with the black man. The road is a dirt road, with some white smoke on the right side of the dirt road drifting towards the left side of the frame. Some trees are on both sides. In the distance, there are thick clouds in the grey sky.", "visual_elements": [ { "content": "The camera switches to show the passer-by's front face at a normal speed.", "type": "camera", "weight": 3 }, { "content": "The passer-by is a black man, wearing a dark windbreaker and light trousers.", "type": "attribute", "weight": 2 }, { "content": "The black man looks very scared and nervous.", "type": "attribute", "weight": 2 }, { "content": "The black man runs fast in the direction of the camera.", "type": "action", "weight": 2 }, { "content": "The man on horseback is about to catch up with the black man.", "type": "action", "weight": 3 }, { "content": "The road is a dirt road.", "type": "attribute", "weight": 2 }, { "content": "Some white smoke is on the right side of the dirt road drifting towards the left side of the frame.", "type": "scene", "weight": 2 }, { "content": "Some trees are on both sides.", "type": "scene", "weight": 2 }, { "content": "In the distance, there are thick clouds in the grey sky.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches again to the man on horseback, showing his face at a slow speed. He is an old man, wearing a light-coloured hat. He waves a flailing whip forward in his right hand. The camera switches to the right hand side of a group of people. They are a group of black people, four men and one woman. They are running towards the front. 
Behind them, the man on horseback raises his whip upwards.", "visual_elements": [ { "content": "The camera switches again to the man on horseback, showing his face at a slow speed.", "type": "camera", "weight": 3 }, { "content": "The man on horseback is an old man, wearing a light-coloured hat.", "type": "camera", "weight": 3 }, { "content": "The man on horseback waves a flailing whip forward in his right hand.", "type": "action", "weight": 3 }, { "content": "The camera switches to the right hand side of a group of people.", "type": "camera", "weight": 3 }, { "content": "They are a group of black people, four men and one woman.", "type": "attribute", "weight": 2 }, { "content": "The five black people are running towards the front.", "type": "action", "weight": 3 }, { "content": "Behind the five black people, the man on horseback raises his whip upwards.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the front of the people. The man on horseback catches up with a black man in a trench coat and is on the left of him. The man on horseback strikes his whip on the black man's back and the black man falls forward to the ground. To the left of the man on horseback, a black girl in a light-coloured dress is running in the direction of the camera. In front of the girl, two people lie on their sides on the ground.", "visual_elements": [ { "content": "The camera switches to the front of the people.", "type": "camera", "weight": 3 }, { "content": "The man on horseback catches up with a black man in a trench coat and is on the left of him.", "type": "attribute", "weight": 2 }, { "content": "The man on horseback strikes his whip on the black man's back.", "type": "action", "weight": 3 }, { "content": "The black man falls forward to the ground.", "type": "action", "weight": 3 }, { "content": "To the left of the man on horseback, a black girl in a light-coloured dress is running in the direction of the camera.", "type": "action", "weight": 1 }, { "content": "In front of the girl, two people lie on their sides on the ground.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to the back of the people, showing the black man falling to the ground. Finally, the camera switches to the right side of the man on horseback. The man on horseback continues to ride forward. Behind him, the black man in the trench coat is lying head down on the ground. 
In the distance, several people fall to the ground and the black girl runs past the two men lying on their sides.", "visual_elements": [ { "content": "The camera switches to the back of the people, showing the black man falling to the ground.", "type": "camera", "weight": 3 }, { "content": "Finally, the camera switches to the right side of the man on horseback.", "type": "camera", "weight": 3 }, { "content": "The man on horseback continues to ride forward.", "type": "action", "weight": 1 }, { "content": "The black man in the trench coat is lying head down on the ground.", "type": "action", "weight": 3 }, { "content": "In the distance, several people fall to the ground.", "type": "action", "weight": 1 }, { "content": "The black girl runs past the two men lying on their sides.", "type": "action", "weight": 1 } ] } ], "n_events": 6, "n_elements": 42 }, { "index": "TUNA_0045", "video_path": "DREAM-1K/269.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 12.62, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with a close-up of a man's front face. He is a black man wearing black rimmed glasses and holding a black telephone receiver in his left hand. He looks down with a very focused expression. The background is blurred and some of the people behind the man are also holding phones in their hands.\nNext, the camera switches to a piece of paper with lines of words and red circles and slashes drawn on the words at the bottom of the paper. A hand is holding a red pen and moving to the upper part of the paper. The camera follows the hand as it moves upwards. The hand then stops at the top of the paper and swings the pen to the right.\nThe camera switches back to the man again. He looks up to his left and then continues to gaze down. The camera switches back to the paper again. The hand rises upwards a little. The camera switches closer to the paper and shows that the hand circles the words \"PENSION FUND CEO Walter Ribbon\" in red pen.\nThe camera switches to just below the man's face and then follows the man's hand movements quickly to the right. The man wears white clothes with a tie on his chest. He quickly extends his right hand forward and places it on a black telephone to dial a number.\nFinally, the camera shifts and moves to the upper left. The frame is blurred. Someone walks past the man's front right side towards the right side of the frame. The man continues to hold the phone in his left hand.", "events": [ { "event": "The video begins with a close-up of a man's front face. He is a black man wearing black rimmed glasses and holding a black telephone receiver in his left hand. He looks down with a very focused expression. 
The background is blurred and some of the people behind the man are also holding phones in their hands.", "visual_elements": [ { "content": "It is a close-up of a man's front face.", "type": "camera", "weight": 3 }, { "content": "The man is a black man wearing black rimmed glasses and holding a black telephone receiver in his left hand.", "type": "attribute", "weight": 2 }, { "content": "He looks down with a very focused expression.", "type": "attribute", "weight": 2 }, { "content": "The background is blurred.", "type": "attribute", "weight": 2 }, { "content": "Some of the people behind the man are also holding phones in their hands.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera switches to a piece of paper with lines of words and red circles and slashes drawn on the words at the bottom of the paper. A hand is holding a red pen and moving to the upper part of the paper. The camera follows the hand as it moves upwards. The hand then stops at the top of the paper and swings the pen to the right.", "visual_elements": [ { "content": "The camera switches to a piece of paper.", "type": "camera", "weight": 3 }, { "content": "There are lines of words on the paper and red circles and slashes drawn on the words at the bottom of the paper.", "type": "attribute", "weight": 2 }, { "content": "A hand is holding a red pen and moving to the upper part of the paper.", "type": "action", "weight": 3 }, { "content": "The camera follows the hand as it moves upwards.", "type": "camera", "weight": 3 } ] }, { "event": "The camera switches back to the man again. He looks up to his left and then continues to gaze down. The camera switches back to the paper again. The hand rises upwards a little. The camera switches closer to the paper and shows that the hand circles the words \"PENSION FUND CEO Walter Ribbon\" in red pen.", "visual_elements": [ { "content": "The camera switches back to the man again.", "type": "camera", "weight": 3 }, { "content": "He looks up to his left and then continues to gaze down.", "type": "action", "weight": 3 }, { "content": "The camera switches back to the paper again.", "type": "camera", "weight": 3 }, { "content": "The hand rises upwards a little.", "type": "action", "weight": 3 }, { "content": "The camera switches closer to the paper.", "type": "camera", "weight": 3 }, { "content": "The hand circles the words \"PENSION FUND CEO Walter Ribbon\" in red pen.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to just below the man's face and then follows the man's hand movements quickly to the right. The man wears white clothes with a tie on his chest. He quickly extends his right hand forward and places it on a black telephone to dial a number.", "visual_elements": [ { "content": "The camera switches to just below the man's face and then follows the man's hand movements quickly to the right.", "type": "camera", "weight": 3 }, { "content": "The man wears white clothes with a tie on his chest.", "type": "attribute", "weight": 2 }, { "content": "He quickly extends his right hand forward and places it on a black telephone to dial a number.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera shifts and moves to the upper left. The frame is blurred. Someone walks past the man's front right side towards the right side of the frame. 
The man continues to hold the phone in his left hand.", "visual_elements": [ { "content": "The camera shifts and moves to the upper left.", "type": "camera", "weight": 3 }, { "content": "The frame is blurred.", "type": "attribute", "weight": 2 }, { "content": "Someone walks past the man's front right side towards the right side of the frame.", "type": "action", "weight": 1 }, { "content": "The man continues to hold the phone in his left hand.", "type": "attribute", "weight": 1 } ] } ], "n_events": 5, "n_elements": 22 }, { "index": "TUNA_0046", "video_path": "DREAM-1K/271.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 8.12, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with a shaky camera. A man wearing a dark suit and a white shirt sidesteps to his left and squeezes into a carriage of the BART metro. The door of the carriage is about to close.\nHe carries a white square object in his right hand. The door of the carriage closes. The man's hand carrying the square object is caught by the door and left outside the door. At the same time, a woman appears from the left side of the frame and walks past the door to the right side of the frame.\nNext, the camera switches to the inside of the carriage. The man pulls his right hand back and struggles to get the object inside. An elderly white-haired man puts his hand on the man to help him pull in the hand that is caught by the door. In the background, many passengers are looking at the man. Finally, the camera switches again to the outside of the carriage. The train has started and is moving rapidly towards the left front of the frame.", "events": [ { "event": "The video begins with a shaky camera. A man wearing a dark suit and a white shirt sidesteps to his left and squeezes into a carriage of the BART metro. The door of the carriage is about to close.", "visual_elements": [ { "content": "The video begins with a shaky camera.", "type": "camera", "weight": 3 }, { "content": "A man wears a dark suit and a white shirt.", "type": "attribute", "weight": 2 }, { "content": "The man sidesteps to his left and squeezes into a carriage of the BART metro.", "type": "action", "weight": 3 }, { "content": "The door of the carriage is about to close.", "type": "attribute", "weight": 2 } ] }, { "event": "He carries a white square object in his right hand. The door of the carriage closes. The man's hand carrying the square object is caught by the door and left outside the door. At the same time, a woman appears from the left side of the frame and walks past the door to the right side of the frame.", "visual_elements": [ { "content": "He carries a white square object in his right hand.", "type": "attribute", "weight": 2 }, { "content": "The door of the carriage closes.", "type": "action", "weight": 2 }, { "content": "The man's hand carrying the square object is caught by the door and left outside the door.", "type": "attribute", "weight": 2 }, { "content": "At the same time, a woman appears from the left side of the frame and walks past the door to the right side of the frame.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera switches to the inside of the carriage. The man pulls his right hand back and struggles to get the object inside. An elderly white-haired man puts his hand on the man to help him pull in the hand that is caught by the door. In the background, many passengers are looking at the man. 
Finally, the camera switches again to the outside of the carriage. The train has started and is moving rapidly towards the left front of the frame.", "visual_elements": [ { "content": "The camera switches to the inside of the carriage.", "type": "camera", "weight": 3 }, { "content": "The man pulls his right hand back and struggles to get the object inside.", "type": "action", "weight": 3 }, { "content": "An elderly white-haired man puts his hand on the man to help him pull in the hand that is caught by the door.", "type": "action", "weight": 2 }, { "content": "In the background, many passengers are looking at the man.", "type": "scene", "weight": 2 }, { "content": "Finally, the camera switches again to the outside of the carriage.", "type": "camera", "weight": 3 }, { "content": "The train has started and is moving rapidly towards the left front of the frame.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0047", "video_path": "DREAM-1K/273.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 14.0, "resolution": { "width": 1840, "height": 1024 }, "caption": "The video begins with the camera filming a middle-aged man from his right side as it shakes slightly. He stands in front of a middle-aged woman and looks down at her. The background is blurred and shows the interior of a dim room. A window can be seen in the distance and the light outside is bright.\nThe camera switches to the front of the woman. She looks directly at the man with a sad expression and presses her lips. She then bows her head. The man bends to the right of the frame. The camera then zooms out as the man bends down to open a patterned fridge door. The man then takes out an item from the fridge with his left hand before he turns to leave the room with his back to the camera. Inside the room is a dining table with some cups on it and two chairs on the right side.\nThe man wears a dark green top and blue jeans with a belt around his waist. The woman stands on the left of the fridge against the wall and wears a floral dress. The man stands with his back to the camera looking out of a floor-to-ceiling window in another room as the woman holds out her hands to cover her face.", "events": [ { "event": "The video begins with the camera filming a middle-aged man from his right side as it shakes slightly. He stands in front of a middle-aged woman and looks down at her. The background is blurred and shows the interior of a dim room. A window can be seen in the distance and the light outside is bright.", "visual_elements": [ { "content": "The video begins with the camera filming a middle-aged man from his right side as it shakes slightly.", "type": "camera", "weight": 3 }, { "content": "The man stands in front of a middle-aged woman and looks down at her.", "type": "attribute", "weight": 2 }, { "content": "The background is blurred.", "type": "attribute", "weight": 2 }, { "content": "The background shows the interior of a dim room.", "type": "scene", "weight": 2 }, { "content": "A window can be seen in the distance and the light outside is bright.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to the front of the woman. She looks directly at the man with a sad expression and presses her lips. She then bows her head. The man bends to the right of the frame. The camera then zooms out as the man bends down to open a patterned fridge door. 
The man then takes out an item from the fridge with his left hand before he turns to leave the room with his back to the camera. Inside the room is a dining table with some cups on it and two chairs on the right side.", "visual_elements": [ { "content": "The camera switches to the front of the woman.", "type": "camera", "weight": 3 }, { "content": "She looks directly at the man with a sad expression and presses her lips.", "type": "attribute", "weight": 2 }, { "content": "The woman bows her head.", "type": "action", "weight": 3 }, { "content": "The man bends to the right of the frame.", "type": "action", "weight": 2 }, { "content": "The camera zooms out as the man bends down to open a patterned fridge door.", "type": "camera", "weight": 3 }, { "content": "The man then takes out an item from the fridge with his left hand before he turns to leave the room with his back to the camera.", "type": "action", "weight": 3 }, { "content": "Inside the room is a dining table with some cups on it and two chairs on the right side.", "type": "attribute", "weight": 2 } ] }, { "event": "The man wears a dark green top and blue jeans with a belt around his waist. The woman stands on the left of the fridge against the wall and wears a floral dress. The man stands with his back to the camera looking out of a floor-to-ceiling window in another room as the woman holds out her hands to cover her face.", "visual_elements": [ { "content": "The man wears a dark green top and blue jeans with a belt around his waist.", "type": "attribute", "weight": 2 }, { "content": "The woman stands on the left of the fridge against the wall and wears a floral dress.", "type": "attribute", "weight": 2 }, { "content": "The man stands with his back to the camera looking out of a floor-to-ceiling window in another room.", "type": "action", "weight": 2 }, { "content": "The woman holds out her hands to cover her face.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0048", "video_path": "DREAM-1K/276.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 5.62, "resolution": { "width": 1840, "height": 1024 }, "caption": "At the beginning of the video, the camera shoots the front of two young men as it sways slightly upwards. The background is blurred. The man on the left side of the frame wears a black sports vest with a rolled white towel over his right shoulder. His muscular right hand holds the end of the towel. The man on the right side of the frame has curly hair and wears a black zip-up jacket with white stripes. The man in the vest has his left hand on the right shoulder of the man in the jacket. The man in the vest looks ahead of him and then turns his head to his left. The man in the jacket looks ahead with a little excitement.\nNext, the camera switches to a wrestling match. A wrestler in black tights carries a wrestler in red tights. The wrestler in red tights is thrown to the ground. Behind the two wrestlers is a referee in a plaid shirt. The referee quickly runs to his front right on the left side of the frame. In the background, many people sit in the audience. At the edge of the field on the left side of the frame, six female cheerleaders dressed in yellow get down on both knees and look forward. There are also cheerleaders dressed in white on the right side of the frame. 
At the edge of the field on the right side of the frame sit four referees, three men and one woman.\nAt the end of the video, the camera switches to two young men. The man in the jacket is very excited and smiles. He quickly turns his head to his left and then looks ahead while talking.", "events": [ { "event": "At the beginning of the video, the camera shoots the front of two young men as it sways slightly upwards. The background is blurred. The man on the left side of the frame wears a black sports vest with a rolled white towel over his right shoulder. His muscular right hand holds the end of the towel. The man on the right side of the frame has curly hair and wears a black zip-up jacket with white stripes. The man in the vest has his left hand on the right shoulder of the man in the jacket. The man in the vest looks ahead of him and then turns his head to his left. The man in the jacket looks ahead with a little excitement.", "visual_elements": [ { "content": "The camera shoots the front of two young men as it sways slightly upwards.", "type": "camera", "weight": 3 }, { "content": "The background is blurred.", "type": "attribute", "weight": 2 }, { "content": "The man on the left side of the frame wears a black sports vest with a rolled white towel over his right shoulder.", "type": "attribute", "weight": 2 }, { "content": "The man on the left side holds the end of the towel with his muscular right hand.", "type": "attribute", "weight": 2 }, { "content": "The man on the right side of the frame has curly hair.", "type": "attribute", "weight": 2 }, { "content": "The man on the right side wears a black zip-up jacket with white stripes.", "type": "attribute", "weight": 2 }, { "content": "The man in the vest has his left hand on the right shoulder of the man in the jacket.", "type": "attribute", "weight": 2 }, { "content": "The man in the vest looks ahead of him and then turns his head to his left.", "type": "action", "weight": 3 }, { "content": "The man in the jacket looks ahead with a little excitement.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera switches to a wrestling match. A wrestler in black tights carries a wrestler in red tights. The wrestler in red tights is thrown to the ground. Behind the two wrestlers is a referee in a plaid shirt. The referee quickly runs to his front right on the left side of the frame. In the background, many people sit in the audience. At the edge of the field on the left side of the frame, six female cheerleaders dressed in yellow get down on both knees and look forward. There are also cheerleaders dressed in white on the right side of the frame. 
At the edge of the field on the right side of the frame sit four referees, three men and one woman.", "visual_elements": [ { "content": "The camera switches to a wrestling match.", "type": "camera", "weight": 3 }, { "content": "A wrestler in black tights carries a wrestler in red tights.", "type": "action", "weight": 3 }, { "content": "The wrestler in red tights is thrown to the ground.", "type": "action", "weight": 3 }, { "content": "Behind the two wrestlers is a referee in a plaid shirt.", "type": "attribute", "weight": 2 }, { "content": "The referee quickly runs to his front right on the left side of the frame.", "type": "action", "weight": 2 }, { "content": "In the background, many people sit in the audience.", "type": "scene", "weight": 2 }, { "content": "At the edge of the field on the left side of the frame, six female cheerleaders dressed in yellow get down on both knees and look forward.", "type": "scene", "weight": 2 }, { "content": "There are also cheerleaders dressed in white on the right side of the frame.", "type": "scene", "weight": 2 }, { "content": "At the edge of the field on the right side of the frame sit four referees, three men and one woman.", "type": "scene", "weight": 2 } ] }, { "event": "At the end of the video, the camera switches to two young men. The man in the jacket is very excited and smiles. He quickly turns his head to his left and then looks ahead while talking.", "visual_elements": [ { "content": "At the end of the video, the camera switches to two young men.", "type": "camera", "weight": 3 }, { "content": "The man in the jacket is very excited and smiles.", "type": "attribute", "weight": 2 }, { "content": "He quickly turns his head to his left and then looks ahead while talking.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0049", "video_path": "DREAM-1K/278.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 5.25, "resolution": { "width": 1840, "height": 1024 }, "caption": "The video begins with the camera shooting closely on two male wrestlers. One wrestler is on top of another wrestler. Both wrestlers wear dark sports vests with yellow trim. The wrestler on top has black guards over his ears, while the wrestler below has blue guards over his ears. Behind them is a male referee wearing a black and white striped T-shirt and a red wristband. He is obese and has a whistle around his neck. He lies on his stomach with his left hand propped on the ground and looks at the two wrestlers. He moves his body a little to his right. There is a seated female spectator in the background.\nThe camera switches to an adult man in a blue shirt. He stands with his head slightly bowed and his hands on his hips and stares forward with a serious expression. In the background, two wrestlers in black tights are painted on the wall with the word \"DOWN\" underneath.\nThe camera switches to film the match from a distant angle and gradually moves forwards. The referee waves his right hand twice to his right rear as he speaks. In the background, some spectators in the audience wave yellow flower balls and placards in their hands. There is a cartoon of a husky on the wall to the right of the frame.", "events": [ { "event": "The video begins with the camera shooting closely on two male wrestlers. One wrestler is on top of another wrestler. Both wrestlers wear dark sports vests with yellow trim. 
The wrestler on top has black guards over his ears, while the wrestler below has blue guards over his ears. Behind them is a male referee wearing a black and white striped T-shirt and a red wristband. He is obese and has a whistle around his neck. He lies on his stomach with his left hand propped on the ground and looks at the two wrestlers. He moves his body a little to his right. There is a seated female spectator in the background.", "visual_elements": [ { "content": "The video begins with the camera shooting closely on two male wrestlers.", "type": "camera", "weight": 3 }, { "content": "One wrestler is on top of another wrestler.", "type": "attribute", "weight": 2 }, { "content": "Both wrestlers wear dark sports vests with yellow trim.", "type": "attribute", "weight": 2 }, { "content": "The wrestler on top has black guards over his ears, while the wrestler below has blue guards over his ears.", "type": "attribute", "weight": 1 }, { "content": "Behind the two wrestlers is a male referee wearing a black and white striped T-shirt and a red wristband.", "type": "attribute", "weight": 2 }, { "content": "The referee is obese and has a whistle around his neck.", "type": "attribute", "weight": 2 }, { "content": "The referee lies on his stomach with his left hand propped on the ground and looks at the two wrestlers.", "type": "attribute", "weight": 2 }, { "content": "The referee moves his body a little to his right.", "type": "action", "weight": 2 }, { "content": "The background is blurred.", "type": "attribute", "weight": 2 }, { "content": "There is a seated female spectator in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to an adult man in a blue shirt. He stands with his head slightly bowed and his hands on his hips and stares forward with a serious expression. In the background, two wrestlers in black tights are painted on the wall with the word \"DOWN\" underneath.", "visual_elements": [ { "content": "The camera switches to an adult man in a blue shirt.", "type": "camera", "weight": 3 }, { "content": "The adult man stands with his head slightly bowed and his hands on his hips and stares forward with a serious expression.", "type": "attribute", "weight": 2 }, { "content": "Two wrestlers in black tights are painted on the wall behind the adult man.", "type": "scene", "weight": 2 }, { "content": "The word \"DOWN\" is on the wall behind the adult man, underneath the painted wrestlers.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to film the match from a distant angle and gradually moves forwards. The referee waves his right hand twice to his right rear as he speaks. In the background, some spectators in the audience wave yellow flower balls and placards in their hands. 
There is a cartoon of a husky on the wall to the right of the frame.", "visual_elements": [ { "content": "The camera switches to film the match from a distant angle and gradually moves forwards.", "type": "camera", "weight": 3 }, { "content": "The referee waves his right hand twice to his right rear as he speaks.", "type": "action", "weight": 3 }, { "content": "In the background, some spectators in the audience wave yellow flower balls and placards in their hands.", "type": "scene", "weight": 2 }, { "content": "There is a cartoon of a husky on the wall to the right of the frame.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0050", "video_path": "DREAM-1K/283.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 9.75, "resolution": { "width": 1920, "height": 1024 }, "caption": "The video begins with the camera shooting from the right rear of a man in the car. The man wears a hat, sits in the driver's seat and looks ahead. The view out the front windscreen of the car is blurred. The car is travelling forward on a road. There are some cars parked on the left side of the street with their heads facing the camera and some buildings next to them.\nThe camera gradually moves to the right and the scene becomes clear. There is a line of cars parked on the right side of the road in the frame. There are some tall bare trees and a few buildings on the right side of the road. The car moves slowly. When the car passes a grey car parked with its front facing the camera, two passers-by appear on the right side of the frame walking in the opposite direction of the car.\nThe passer-by near the road is a black man. The man wears a grey coat, black trousers, leather shoes and a hat. The woman wears a blue coat and high heels, with her hands in her pockets and a handbag in her left arm. They walks shoulder to shoulder. The black man extends his right hand out of his pocket and then gestures towards the road before putting it back in his pocket, while talking with his face towards the woman. The woman looks at looks at the black man on her right.\nAs the two passers-by move to the position of the right window glass of the car, the camera rotates quickly to the right and moves back, leaving only the backs of the two people in the shot. The steps of the building, which the two passers-by walk pass, have iron railings on them. The outside wall of the building is dark in and a little bit old.", "events": [ { "event": "The video begins with the camera shooting from the right rear of a man in the car. The man wears a hat, sits in the driver's seat and looks ahead. The view out the front windscreen of the car is blurred. The car is travelling forward on a road. 
There are some cars parked on the left side of the street with their front ends facing the camera and some buildings next to them.", "visual_elements": [ { "content": "The video begins with the camera shooting from the right rear of a man in the car.", "type": "camera", "weight": 3 }, { "content": "The man wears a hat, sits in the driver's seat and looks ahead.", "type": "attribute", "weight": 2 }, { "content": "The view out the front windscreen of the car is blurred.", "type": "attribute", "weight": 2 }, { "content": "The car is travelling forward on a road.", "type": "action", "weight": 3 }, { "content": "There are some cars parked on the left side of the street with their front ends facing the camera and some buildings next to them.", "type": "scene", "weight": 2 } ] }, { "event": "The camera gradually moves to the right and the scene becomes clear. There is a line of cars parked on the right side of the road in the frame. There are some tall bare trees and a few buildings on the right side of the road. The car moves slowly. When the car passes a grey car parked with its front facing the camera, two passers-by appear on the right side of the frame walking in the opposite direction of the car.", "visual_elements": [ { "content": "The camera gradually moves to the right and the scene becomes clear.", "type": "camera", "weight": 3 }, { "content": "There is a line of cars parked on the right side of the road in the frame.", "type": "scene", "weight": 2 }, { "content": "There are some tall bare trees and a few buildings on the right side of the road.", "type": "scene", "weight": 2 }, { "content": "The car moves slowly.", "type": "attribute", "weight": 2 }, { "content": "Two passers-by appear on the right side of the frame walking in the opposite direction of the car.", "type": "camera", "weight": 3 } ] }, { "event": "The passer-by near the road is a black man. The man wears a grey coat, black trousers, leather shoes and a hat. The woman wears a blue coat and high heels, with her hands in her pockets and a handbag on her left arm. They walk shoulder to shoulder. The black man extends his right hand out of his pocket and then gestures towards the road before putting it back in his pocket, while talking with his face towards the woman. The woman looks at the black man on her right.", "visual_elements": [ { "content": "The passer-by near the road is a black man.", "type": "attribute", "weight": 2 }, { "content": "The man wears a grey coat, black trousers, leather shoes and a hat.", "type": "attribute", "weight": 2 }, { "content": "The woman wears a blue coat and high heels, with her hands in her pockets and a handbag on her left arm.", "type": "attribute", "weight": 2 }, { "content": "They walk shoulder to shoulder.", "type": "attribute", "weight": 2 }, { "content": "The black man extends his right hand out of his pocket and then gestures towards the road before putting it back in his pocket.", "type": "action", "weight": 3 }, { "content": "The black man is talking with his face towards the woman.", "type": "action", "weight": 3 }, { "content": "The woman looks at the black man on her right.", "type": "attribute", "weight": 2 } ] }, { "event": "As the two passers-by move to the position of the right window glass of the car, the camera rotates quickly to the right and moves back, leaving only the backs of the two people in the shot. The steps of the building, which the two passers-by walk past, have iron railings on them. 
The outside wall of the building is dark and a little bit old.", "visual_elements": [ { "content": "The camera rotates quickly to the right and moves back, leaving only the backs of the two people in the shot.", "type": "camera", "weight": 3 }, { "content": "The steps of the building, which the two passers-by walk past, have iron railings on them.", "type": "attribute", "weight": 2 }, { "content": "The outside wall of the building is dark and a little bit old.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_0051", "video_path": "DREAM-1K/290.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 3.62, "resolution": { "width": 1904, "height": 1024 }, "caption": "The video begins with the camera focusing on a middle-aged man and woman. The two stand on a street and the man holds the woman's right arm in his left arm. The two walk together in the direction of the camera. The man wears a brown coat, a black inner layer and brown trousers, while the woman wears a light grey coat with a blue sweater and black trousers. She carries a bag over her left shoulder. Behind the two, there is a passerby walking away from the direction of the camera, with a bag in the left hand. Behind the two on the left is a clothing shop. In front of the two, a man wearing a knit cap looks at them.\nAs the two people walk forward, the man in the knit cap quickly turns his head in the direction of the camera. The camera switches to the ground. A lit cigarette is thrown on the ground and rolls towards the right side of the frame.\nThe camera switches to show two young people. The young person on the left of the frame is a woman. She wears a dark knitted hat, a scarf and a blue denim jacket. The young person on the right hand side of the frame is a man. He wears a green printed jacket, a floral scarf around his neck and carries a backpack.\nThe young woman covers her mouth with her right hand and lowers her head to her left side. The young woman turns her head to face the direction of the camera. The young man looks ahead and then lowers his head. At the end of the video, the camera closes in again on the ground, showing a foot wearing a brown canvas shoe and stepping on the extinguished cigarette.", "events": [ { "event": "The video begins with the camera focusing on a middle-aged man and woman. The two stand on a street and the man holds the woman's right arm in his left arm. The two walk together in the direction of the camera. The man wears a brown coat, a black inner layer and brown trousers, while the woman wears a light grey coat with a blue sweater and black trousers. She carries a bag over her left shoulder. Behind the two, there is a passerby walking away from the direction of the camera, with a bag in the left hand. Behind the two on the left is a clothing shop. 
In front of the two, a man wearing a knit cap looks at them.", "visual_elements": [ { "content": "The video begins with the camera focusing on a middle-aged man and woman.", "type": "camera", "weight": 3 }, { "content": "The two stand on a street and the man holds the woman's right arm in his left arm.", "type": "attribute", "weight": 2 }, { "content": "The two walk together in the direction of the camera.", "type": "action", "weight": 3 }, { "content": "The man wears a brown coat, black underwear and brown trousers.", "type": "attribute", "weight": 2 }, { "content": "The woman wears a light grey coat with a blue sweater and black trousers.", "type": "attribute", "weight": 2 }, { "content": "The woman carries a bag over her left shoulder.", "type": "attribute", "weight": 2 }, { "content": "Behind the two, there is a passerby walking away from the direction of the camera, with a bag in the left hand.", "type": "scene", "weight": 2 }, { "content": "Behind the two on the left is a clothing shop.", "type": "scene", "weight": 2 }, { "content": "In front of the two, a man wearing a knit cap looks at them.", "type": "action", "weight": 1 } ] }, { "event": "As the two people walk forward, the man in the knit cap quickly turns his head in the direction of the camera. The camera switches to the ground. A lit cigarette is thrown on the ground and rolls towards the right side of the frame.", "visual_elements": [ { "content": "The man in the knit cap quickly turns his head in the direction of the camera.", "type": "action", "weight": 3 }, { "content": "The camera switches to the ground.", "type": "camera", "weight": 3 }, { "content": "A lit cigarette is thrown on the ground and rolls towards the right side of the frame.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to show two young people. The young person on the left of the frame is a woman. She wears a dark knitted hat, a scarf and a blue denim jacket. The young person on the right hand side of the frame is man. He wears a green printed jacket, a floral scarf around his neck and carries a backpack.", "visual_elements": [ { "content": "The camera switches to show two young people.", "type": "camera", "weight": 3 }, { "content": "The young person on the left of the frame is a woman.", "type": "attribute", "weight": 3 }, { "content": "The young woman wears a dark knitted hat, a scarf and a blue denim jacket.", "type": "attribute", "weight": 2 }, { "content": "The young person on the right hand side of the frame is man.", "type": "attribute", "weight": 3 }, { "content": "He wears a green printed jacket, a floral scarf around his neck and carries a backpack.", "type": "attribute", "weight": 2 } ] }, { "event": "The young woman covers her mouth with her right hand and lowers her head to her left side. The young woman turns her head to face the direction of the camera. The young man looks ahead and then lowers his head. 
At the end of the video, the camera closes in again on the ground, showing a foot wearing a brown canvas shoe and stepping on the extinguished cigarette.", "visual_elements": [ { "content": "The young woman covers her mouth with her right hand and lowers her head to her left side.", "type": "action", "weight": 3 }, { "content": "The young woman turns her head to face the direction of the camera.", "type": "action", "weight": 3 }, { "content": "The young man looks ahead and then lowers his head.", "type": "action", "weight": 2 }, { "content": "The camera closes in again on the ground, showing a foot wearing a brown canvas shoe and stepping on the extinguished cigarette.", "type": "camera", "weight": 3 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0052", "video_path": "DREAM-1K/292.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 6.88, "resolution": { "width": 1904, "height": 1024 }, "caption": "The video begins with the camera focusing on a young woman from her right side. She wears a denim jacket, a dark knitte cap and a dark scarf. She holds a black camera in her left hand and has a white cigarette between the middle and index fingers of her right hand. In front of her, a young man faces her and extends his left hand forward. The man wears a green coat with prints, brown trousers, a floral scarf. The man carries a backpack. The two stand on a street. A passerby in black clothes stands on the woman's front left and looks at the two. There are several buildings in the distance.\nThe camera then switches to the front of the woman. She is smoking a cigarette, with her right index and middle fingers in front of her mouth and two silver rings on her middle and ring fingers. Behind the woman, some pedestrians walk with their backs to the camera. There are some tall bare trees on the right side of the road in the frame. Behind the woman on the left are three people sitting on chairs by the roadside. A woman dressed in black holds a mobile phone in her right hand. A man in a grey jacket is on her left. He holds on a railing with his left hand and looks in the direction of the camera.\nThe camera moves to the left and then rotates to the left. The woman drops the cigarette on the ground with her right hand and then holds the black camera in both hands. At the same time, two passers-by walk past the woman's left side to walk towards the right side of the frame. Finally, the camera focuses on the man and the woman disappears to the left side of the frame. The male extends his left hand straight ahead with five fingers spreading wide. He wears a watch on his left hand and waves his right hand backwards then forwards at the same time. At the same time, the man takes a few steps forward as he speaks.", "events": [ { "event": "The video begins with the camera focusing on a young woman from her right side. She wears a denim jacket, a dark knitte cap and a dark scarf. She holds a black camera in her left hand and has a white cigarette between the middle and index fingers of her right hand. In front of her, a young man faces her and extends his left hand forward. The man wears a green coat with prints, brown trousers, a floral scarf. The man carries a backpack. The two stand on a street. A passerby in black clothes stands on the woman's front left and looks at the two. 
There are several buildings in the distance.", "visual_elements": [ { "content": "The camera focuses on a young woman from her right side.", "type": "camera", "weight": 3 }, { "content": "The woman wears a denim jacket, a dark knitted cap and a dark scarf.", "type": "attribute", "weight": 2 }, { "content": "The woman holds a black camera in her left hand and has a white cigarette between the middle and index fingers of her right hand.", "type": "attribute", "weight": 3 }, { "content": "In front of the woman, a young man faces her.", "type": "attribute", "weight": 2 }, { "content": "The man extends his left hand forward.", "type": "action", "weight": 3 }, { "content": "The man wears a green coat with prints, brown trousers and a floral scarf.", "type": "attribute", "weight": 2 }, { "content": "The man carries a backpack.", "type": "attribute", "weight": 1 }, { "content": "The man and the woman stand on a street.", "type": "attribute", "weight": 3 }, { "content": "A passerby in black clothes stands on the woman's front left and looks at the two. There are several buildings in the distance.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then switches to the front of the woman. She is smoking a cigarette, with her right index and middle fingers in front of her mouth and two silver rings on her middle and ring fingers. Behind the woman, some pedestrians walk with their backs to the camera. There are some tall bare trees on the right side of the road in the frame. Behind the woman on the left are three people sitting on chairs by the roadside. A woman dressed in black holds a mobile phone in her right hand. A man in a grey jacket is on her left. He holds onto a railing with his left hand and looks in the direction of the camera.", "visual_elements": [ { "content": "The camera then switches to the front of the woman.", "type": "camera", "weight": 3 }, { "content": "She is smoking a cigarette, with her right index and middle fingers in front of her mouth.", "type": "action", "weight": 3 }, { "content": "The woman wears two silver rings on her middle and ring fingers.", "type": "attribute", "weight": 1 }, { "content": "Behind the woman, some pedestrians walk with their backs to the camera.", "type": "scene", "weight": 1 }, { "content": "There are some tall bare trees on the right side of the road in the frame.", "type": "scene", "weight": 2 }, { "content": "Behind the woman on the left are three people sitting on chairs by the roadside.", "type": "scene", "weight": 1 }, { "content": "A woman dressed in black holds a mobile phone in her right hand. A man in a grey jacket is on her left.", "type": "scene", "weight": 1 }, { "content": "The man in the grey jacket holds onto a railing with his left hand and looks in the direction of the camera.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves to the left and then rotates to the left. The woman drops the cigarette on the ground with her right hand and then holds the black camera in both hands. At the same time, two passers-by walk past the woman's left side to walk towards the right side of the frame. Finally, the camera focuses on the man and the woman disappears to the left side of the frame. The man extends his left hand straight ahead with five fingers spread wide. He wears a watch on his left hand and waves his right hand backwards then forwards at the same time. 
At the same time, the man takes a few steps forward as he speaks.", "visual_elements": [ { "content": "The camera moves to the left and then rotates to the left.", "type": "camera", "weight": 3 }, { "content": "The woman drops the cigarette on the ground with her right hand and then holds the black camera in both hands.", "type": "action", "weight": 3 }, { "content": "Two passers-by walk past the woman's left side to walk towards the right side of the frame.", "type": "scene", "weight": 2 }, { "content": "Finally, the camera focuses on the man and the woman disappears to the left side of the frame.", "type": "camera", "weight": 3 }, { "content": "The male extends his left hand straight ahead with five fingers spreading wide.", "type": "action", "weight": 3 }, { "content": "The man wears a watch on his left hand.", "type": "attribute", "weight": 1 }, { "content": "The man waves his right hand backwards then forwards at the same time.", "type": "action", "weight": 2 }, { "content": "The man takes a few steps forward as he speaks.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 25 }, { "index": "TUNA_0053", "video_path": "DREAM-1K/301.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 13.25, "resolution": { "width": 1824, "height": 1024 }, "caption": "The video begins as the camera zooms in on a black sedan with a parking lot in the background. A man wearing a grey jacket and blue jeans with his back to the camera appears on the right side of the screen. Immediately, he is knocked down by another man with a white vest, blue jeans and a white hat. The man in the grey jacket spins to face the camera and falls to the ground. The man in the white vest runs quickly to the same black car.\nThe man in the white vest runs to the car, opens the door with force and gets in the car quickly before closing the door. The camera, shooting from the side, shows that he sits in the driver's seat with his hands on the steering wheel. Then, the camera turns to the left as the man in the white vest begins to start the car. At the moment, the man in the grey jacket raises his hand to hold the car window, and the man in the white vest looks out of the window. Then, the man in the white vest opens the car door and slams it shut. He opens the door again and shuts it hard again to get rid of the hand on the window. Finally, the man in the white vest grasps the the steering wheel, starts the car, and drives off quickly.\nFinally, the black car pulls out of the frame. The man in the grey jacket quickly stands up and looks in the direction of the black car. In the background, buildings and trees in the distance are still visible.", "events": [ { "event": "The video begins as the camera zooms in on a black sedan with a parking lot in the background. A man wearing a grey jacket and blue jeans with his back to the camera appears on the right side of the screen. Immediately, he is knocked down by another man with a white vest, blue jeans and a white hat. The man in the grey jacket spins to face the camera and falls to the ground. 
The man in the white vest runs quickly to the same black car.", "visual_elements": [ { "content": "The camera zooms in on a black sedan.", "type": "camera", "weight": 3 }, { "content": "A parking lot is in the background.", "type": "scene", "weight": 2 }, { "content": "A man wears a grey jacket and blue jeans.", "type": "attribute", "weight": 3 }, { "content": "With his back to the camera, he appears on the right side of the screen.", "type": "attribute", "weight": 2 }, { "content": "Another man wears a white vest, blue jeans and a white cap.", "type": "attribute", "weight": 3 }, { "content": "The man in the grey jacket spins to face the camera and falls to the ground.", "type": "action", "weight": 3 }, { "content": "The man in the white vest runs quickly to the same black car.", "type": "action", "weight": 3 } ] }, { "event": "The man in the white vest runs to the car, opens the door with force and gets in the car quickly before closing the door. The camera, shooting from the side, shows that he sits in the driver's seat with his hands on the steering wheel. Then, the camera turns to the left as the man in the white vest begins to start the car. At that moment, the man in the grey jacket raises his hand to hold the car window, and the man in the white vest looks out of the window. Then, the man in the white vest opens the car door and slams it shut. He opens the door again and shuts it hard again to get rid of the hand on the window. Finally, the man in the white vest grasps the steering wheel, starts the car, and drives off quickly.", "visual_elements": [ { "content": "The man in the white vest runs to the car.", "type": "action", "weight": 2 }, { "content": "The man in the white vest opens the door with force.", "type": "action", "weight": 2 }, { "content": "The man in the white vest gets in the car before closing the door.", "type": "action", "weight": 2 }, { "content": "The camera shoots from the side.", "type": "camera", "weight": 3 }, { "content": "The man in the white vest holds the steering wheel with his hands.", "type": "action", "weight": 2 }, { "content": "The camera turns to the left.", "type": "camera", "weight": 3 }, { "content": "The man in the white vest begins to start the car.", "type": "action", "weight": 2 }, { "content": "The man in the grey jacket raises his hand to hold the car window.", "type": "action", "weight": 3 }, { "content": "The man in the white vest looks out of the window.", "type": "action", "weight": 2 }, { "content": "The man in the white vest repeatedly opens the door and slams it shut.", "type": "action", "weight": 3 }, { "content": "The hand on the window slips off.", "type": "action", "weight": 2 }, { "content": "The man in the white vest starts the car and drives off quickly.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the black car pulls out of the frame. The man in the grey jacket quickly stands up and looks in the direction of the black car. 
In the background, buildings and trees in the distance are still visible.", "visual_elements": [ { "content": "The black car pulls out of the frame.", "type": "scene", "weight": 2 }, { "content": "The man in the grey jacket quickly stands up.", "type": "action", "weight": 3 }, { "content": "The man in the grey jacket looks in the direction of the black car.", "type": "action", "weight": 3 }, { "content": "Buildings and trees in the distances are still visible.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0054", "video_path": "DREAM-1K/302.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic", "domain": "Film", "duration": 5.5, "resolution": { "width": 1920, "height": 816 }, "caption": "The video begins with the camera focusing on a man wearing a dark suit with a plaid shirt and a black tie. He is standing on the porch of a house with the darkness of night as the background and two vintage-style pendant lights hanging from the porch, illuminating the surroundings.\nThe man stretches out his right hand and rings the doorbell. He then takes a few steps back and looks up. The camera shoots from his side and can clearly capture the action of ringing the doorbell and his look of waiting.\nThe man then turns his head to the left and looks to the left.", "events": [ { "event": "The video begins with the camera focusing on a man wearing a dark suit with a plaid shirt and a black tie. He is standing on the porch of a house with the darkness of night as the background and two vintage-style pendant lights hanging from the porch, illuminating the surroundings.", "visual_elements": [ { "content": "The camera focuses on a man.", "type": "camera", "weight": 3 }, { "content": "The man wears a dark suit with a plaid shirt and a black tie.", "type": "attribute", "weight": 3 }, { "content": "He stands on a porch of a house.", "type": "attribute", "weight": 3 }, { "content": "The darkness of night is the background.", "type": "scene", "weight": 2 }, { "content": "Two vintage-style pendant lights hang from the porch, illuminating the surroundings.", "type": "scene", "weight": 3 } ] }, { "event": "The man stretches out his right hand and rings the doorbell. He then takes a few steps back and looks up. The camera shoots from his side and can clearly capture the action of ringing the doorbell and his look of waiting.", "visual_elements": [ { "content": "The man stretches out his right hand and rings the doorbell.", "type": "action", "weight": 3 }, { "content": "The man then takes a few steps back.", "type": "action", "weight": 3 }, { "content": "The man looks up.", "type": "action", "weight": 3 }, { "content": "The camera shoots from his side.", "type": "camera", "weight": 3 }, { "content": "The camera can clearly capture the action of ringing the doorbell and his look of waiting.", "type": "camera", "weight": 3 } ] }, { "event": "The man then turns his head to the left and looks to the left.", "visual_elements": [ { "content": "The man turns his head to the left and looks to the left.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0055", "video_path": "DREAM-1K/303.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 7.12, "resolution": { "width": 1920, "height": 816 }, "caption": "The video begins with the camera focusing on the doorway of a bathroom and then the view shot from the outside to the inside of the door. 
There is a woman and a man in the bathroom. The camera then turns to the woman and the man seated behind her. The man is at the lower right corner of the frame, while the woman is in the left-center of the screen,standing in front of a mirror. The woman wears a grey knitted sweater and jeans. Standing in front of the sink, the woman is facing the mirror and fixing her hair with her left hand. There are some toiletries and towels on the sink, and several bottles of water and other items on the shelf next to it.\nThe man sits on the toilet, wearing a black sweater and brown pants, with his white shirt collar exposed. He lowers his head and then stands up. The background wall is made of bricks and the windows have frosted glass, through which you can see the light outside.\nThe camera then switches to a close-up shot of a hand closing a cabinet door with a metal button.\nThe camera returns to a panoramic view of the bathroom where the man walks over to the sink, picks up a towel and begins to wipe his hands. The woman holds a glass of drink with both hands and lowers her head to face the sink. She then leans against a metal pole, shaking the bottom of the glass gently with her right hand as she looks up at the man and speaks to him. The man turns his head first to the left and then in the direction of the woman.", "events": [ { "event": "The video begins with the camera focusing on the doorway of a bathroom and then the view shot from the outside to the inside of the door. There is a woman and a man in the bathroom. The camera then turns to the woman and the man seated behind her. The man is at the lower right corner of the frame, while the woman is in the left-center of the screen,standing in front of a mirror. The woman wears a grey knitted sweater and jeans. Standing in front of the sink, the woman is facing the mirror and fixing her hair with her left hand. There are some toiletries and towels on the sink, and several bottles of water and other items on the shelf next to it.", "visual_elements": [ { "content": "The camera focusing on the doorway of a bathroom and then the view shot from the outside to the inside", "type": "camera", "weight": 3 }, { "content": "There is a woman and man in the bathroom.", "type": "scene", "weight": 3 }, { "content": "The camera turns to the woman and the man seated behind her.", "type": "camera", "weight": 3 }, { "content": "The man is at the lower corner of the frame.", "type": "attribute", "weight": 3 }, { "content": "The woman is in the left-center of the screen, standing in front of a corner.", "type": "attribute", "weight": 3 }, { "content": "The woman wears a grey knitted sweater and jeans.", "type": "attribute", "weight": 3 }, { "content": "Standing in front of the sink, the woman is facing the mirror.", "type": "attribute", "weight": 2 }, { "content": "The woman is fixing her hair with her left hand.", "type": "action", "weight": 3 }, { "content": "There are some toiletries and towels on the sink, and several bottles of water and other items on the shelf next to it.", "type": "scene", "weight": 2 } ] }, { "event": "The man sits on the toilet, wearing a black sweater and brown pants, with his white shirt collar exposed. He lowers his head and then stands up. 
The background wall is made of bricks and the windows have frosted glass, through which you can see the light outside.", "visual_elements": [ { "content": "The man sits on the toilet.", "type": "attribute", "weight": 2 }, { "content": "The man wears a black sweater and brown pants, with his white shirt collar exposed.", "type": "attribute", "weight": 3 }, { "content": "He lowers his head and then stands up.", "type": "action", "weight": 3 }, { "content": "The background wall is made of bricks and the windows have frosted glass, through which you can see the light outside.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then switches to a close-up shot of a hand closing a cabinet door with a metal button.", "visual_elements": [ { "content": "The camera switches to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "A hand is closing a cabinet door with metal a button.", "type": "action", "weight": 3 } ] }, { "event": "The camera returns to a panoramic view of the bathroom where the man walks over to the sink, picks up a towel and begins to wipe his hands. The woman holds a glass of drink with both hands and lowers her head to face the sink. She then leans against a metal pole, shaking the bottom of the glass gently with her right hand as she looks up at the man and speaks to him. The man turns his head first to the left and then in the direction of the woman.", "visual_elements": [ { "content": "The camera returns to a panoramic view of the bathroom.", "type": "camera", "weight": 3 }, { "content": "The man walks over to the sink.", "type": "action", "weight": 3 }, { "content": "The man picks up a towel and begins to wipe his hands.", "type": "action", "weight": 3 }, { "content": "The woman holds a glass of drink with both hands.", "type": "attribute", "weight": 2 }, { "content": "The woman lowers her head to face the sink.", "type": "action", "weight": 2 }, { "content": "The woman shakes the bottom of the glass with her right hand.", "type": "action", "weight": 1 }, { "content": "Leaning against a metal pole, the woman looks up at the man and speaks to him.", "type": "action", "weight": 3 }, { "content": "The man turns his head first to the left.", "type": "action", "weight": 1 }, { "content": "The man then turns in the direction of the woman.", "type": "action", "weight": 1 } ] } ], "n_events": 4, "n_elements": 24 }, { "index": "TUNA_0056", "video_path": "DREAM-1K/304.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 7.62, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with a top-down view of a black male player in a red basketball uniform who is leaping high into the air, holding the basketball in both hands and dunking it upwards. The background features a wooden basketball court with black marking lines on the floor. Next, the camera switches to a closer view of the black player's hands dunking the basketball into the basket and grasping the basket with both hands as the basket net upturns due to the impact of the basketball. During the process, a man next to the black player raises his hands in defence of the black player's goal. The camera then turns to the sidelines where several males and players in suits stand up. One of the men in a black suit, white shirt and black tie stands up excitedly and waves his fist. Next to him are several players wearing sportswear with the words \"Texas Western\". 
And there are more spectators taking pictures and cheering in the background.\nThe camera continues to switch to capture more viewers and players who are all cheering for the dunk. The audience wear different colours of clothes. some are applauding while others are waving.\nNext, the camera closes in on an elderly man wearing a black suit, white shirt and black tie. He looks at the lower left in silence. The camera moves to the right, with the cheering crowd still in the background and someone walking past the camera.", "events": [ { "event": "The video begins with a top-down view of a black male player in a red basketball uniform who is leaping high into the air, holding the basketball in both hands and dunking it upwards. The background features a wooden basketball court with black marking lines on the floor. Next, the camera switches to a closer view of the black player's hands dunking the basketball into the basket and grasping the basket with both hands as the basket net upturns due to the impact of the basketball. During the process, a man next to the black player raises his hands in defence of the black player's goal. The camera then turns to the sidelines where several males and players in suits stand up. One of the men in a black suit, white shirt and black tie stands up excitedly and waves his fist. Next to him are several players wearing sportswear with the words \"Texas Western\". And there are more spectators taking pictures and cheering in the background.", "visual_elements": [ { "content": "It is a top-down view.", "type": "camera", "weight": 3 }, { "content": "A black male player in a red basketball uniform.", "type": "attribute", "weight": 3 }, { "content": "The black male player is leaping high into the air, holding the basketball in both hands and dunking it upwards.", "type": "action", "weight": 3 }, { "content": "The background features a wooden basketball court with black marking lines on the floor.", "type": "scene", "weight": 1 }, { "content": "The camera switches to a closer view.", "type": "camera", "weight": 3 }, { "content": "The black player's hands dunking the basketball into the basket.", "type": "action", "weight": 3 }, { "content": "The black player grasps the basket with both hands", "type": "action", "weight": 3 }, { "content": "The basket net upturns due to the impact of the basketball.", "type": "scene", "weight": 2 }, { "content": "A man next to the black player raises his hands in defence of the black player's goal.", "type": "action", "weight": 2 }, { "content": "The camera turns to the sidelines.", "type": "camera", "weight": 3 }, { "content": "Several males and players in suits stand up.", "type": "action", "weight": 2 }, { "content": "A man wears a black suit, white shirt and black tie.", "type": "attribute", "weight": 2 }, { "content": "The man in a black suit stands up excitedly and waves his fist.", "type": "action", "weight": 3 }, { "content": "Several players nearby wear sportswear with the words \"Texas Western\".", "type": "attribute", "weight": 2 }, { "content": "There are more spectators taking pictures and cheering in the background.", "type": "scene", "weight": 1 } ] }, { "event": "The camera continues to switch to capture more viewers and players who are all cheering for the dunk. The audience wear different colours of clothes. 
Some are applauding while others are waving.", "visual_elements": [ { "content": "The camera continues to switch to capture more viewers and the players.", "type": "camera", "weight": 3 }, { "content": "They are all cheering for the dunk.", "type": "scene", "weight": 2 }, { "content": "The audience wear different colours of clothes.", "type": "attribute", "weight": 1 }, { "content": "Some are applauding while others are waving.", "type": "action", "weight": 1 } ] }, { "event": "Next, the camera closes in on an elderly man wearing a black suit, white shirt and black tie. He looks at the lower left in silence. The camera moves to the right, with the cheering crowd still in the background and someone walking past the camera.", "visual_elements": [ { "content": "Next, the camera closes in on an elderly man.", "type": "camera", "weight": 3 }, { "content": "The elderly man wears a black suit, white shirt and black tie.", "type": "attribute", "weight": 3 }, { "content": "The elderly man looks at the lower left in silence.", "type": "action", "weight": 3 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The cheering crowd are still in the background.", "type": "scene", "weight": 2 }, { "content": "Someone walks past the camera.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 25 }, { "index": "TUNA_0057", "video_path": "DREAM-1K/305.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 5.38, "resolution": { "width": 1920, "height": 1024 }, "caption": "The video begins with the camera focusing on a man with a wound above his right eye. He wears a dark jacket, white shirt and grey hat. \nHe looks down and then blinks while swallowing.\nNext, the camera switches to a side view of the man. He stands in front of a row of metal lockers, with neatly arranged lockers as the background. The man opens the door of a locker with his right hand. Then, the man reaches for the door with his left hand.", "events": [ { "event": "The video begins with the camera focusing on a man with a wound above his right eye. He wears a dark jacket, white shirt and grey hat. \nHe looks down and then blinks while swallowing.", "visual_elements": [ { "content": "The video begins with the camera focusing on a man wearing a dark jacket and white shirt.", "type": "camera", "weight": 3 }, { "content": "The man wears a dark jacket, white shirt and grey hat.", "type": "attribute", "weight": 3 }, { "content": "The man has a wound above his right eye.", "type": "attribute", "weight": 3 }, { "content": "The man looks down and then blinks.", "type": "action", "weight": 2 }, { "content": "The man swallows.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera switches to a side view of the man. He stands in front of a row of metal lockers, with neatly arranged lockers as the background. The man opens the door of a locker with his right hand. 
Then, the man reaches for the door with his left hand.", "visual_elements": [ { "content": "The camera switches to a side view of the man.", "type": "camera", "weight": 3 }, { "content": "The man stands in front of a row of metal lockers.", "type": "attribute", "weight": 2 }, { "content": "Neatly arranged lockers is the background.", "type": "scene", "weight": 2 }, { "content": "The man opens the door of a locker with his right hand.", "type": "action", "weight": 3 }, { "content": "The man reaches for the door with his left hand.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0058", "video_path": "DREAM-1K/306.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 5.62, "resolution": { "width": 1920, "height": 1024 }, "caption": "The video shows a man and a woman standing in a corridor. The man wears a dark coat, while the woman wears a red jacket with a white collar. The background is an ornately decorated corridor with wall lamps, red carpeted floor and a door at the end.\nIn the video, the camera focuses mainly on the two characters. The woman closes her eyes with her head down. She is frowning and sobbing quietly. The man gently strokes the woman's chin with his left hand and then puts his left hand down. Next, The man raises his right hand to stroke the woman's chin and then gets closer to her to kiss her forehead.", "events": [ { "event": "The video shows a man and a woman standing in a corridor. The man wears a dark coat, while the woman wears a red jacket with a white collar. The background is an ornately decorated corridor with wall lamps, red carpeted floor and a door at the end.", "visual_elements": [ { "content": "A man and a woman stand in a corridor.", "type": "scene", "weight": 2 }, { "content": "The man wears a dark coat.", "type": "attribute", "weight": 3 }, { "content": "The woman wears a red jacket with a white collar.", "type": "attribute", "weight": 3 }, { "content": "The background is an ornately decorated corridor with wall lamps, red carpeted floor and a door at the end.", "type": "scene", "weight": 2 } ] }, { "event": "In the video, the camera focuses mainly on the two characters. The woman closes her eyes with her head down. She is frowning and sobbing quietly. The man gently strokes the woman's chin with his left hand and then puts his left hand down. Next, The man raises his right hand to stroke the woman's chin and then gets closer to her to kiss her forehead.", "visual_elements": [ { "content": "The woman closes her eyes with her head down.", "type": "attribute", "weight": 2 }, { "content": "She is frowning and sobbing quietly.", "type": "action", "weight": 3 }, { "content": "The man gently strokes the woman's chin with his left hand and then puts his left hand down.", "type": "action", "weight": 3 }, { "content": "The man raises his right hand to stroke the woman's chin.", "type": "action", "weight": 3 }, { "content": "The man gets closer to the woman to kiss her forehead.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0059", "video_path": "DREAM-1K/307.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 8.25, "resolution": { "width": 1920, "height": 1024 }, "caption": "The video begins with the image of a man wearing a black coat and a grey hat, with his left hand in the coat pocket and a white cup in his right hand. 
The background is a street lined with red brick buildings and some shops. On the right of the man is a couple holding an umbrella, and on the street behind the man are pedestrians holding umbrellas and a boy selling newspapers.\nThe man walks forward and on his left is a row of flower shops with various flowers placed under green awnings. The man lowers his head a little to avoid the green awnings overhead and continues to walk forward. The camera follows the man and pans right as he walks. The wet street bustles with many pedestrians. The man continues to move forward through the crowd.\nThe camera switches to a wider view, showing a lower angle perspective of the street. A few old cars park on the street and buildings across the street have some ladders attached to the outside walls. The man walks towards the camera, turning his head to look at his left as he walks forward. A green car quickly passes in front of the man, leaving behind white exhaust. The man then continues to jog over to the camera.", "events": [ { "event": "The video begins with the image of a man wearing a black coat and a grey hat, with his left hand in the coat pocket and a white cup in his right hand. The background is a street lined with red brick buildings and some shops. On the right of the man is a couple holding an umbrella, and on the street behind the man are pedestrians holding umbrellas and a boy selling newspapers.", "visual_elements": [ { "content": "A man appears.", "type": "attribute", "weight": 3 }, { "content": "The man wears a black coat and a grey hat.", "type": "attribute", "weight": 3 }, { "content": "His left hand is in the coat pocket and a white cup is in his right hand.", "type": "attribute", "weight": 3 }, { "content": "The background is a street lined with red brick buildings and some shops.", "type": "scene", "weight": 2 }, { "content": "On the right of the man is a couple holding an umbrella.", "type": "attribute", "weight": 2 }, { "content": "On the street behind the man are pedestrians holding umbrellas and a boy selling newspapers.", "type": "scene", "weight": 2 } ] }, { "event": "The man walks forward and on his left is a row of flower shops with various flowers placed under green awnings. The man lowers his head a little to avoid the green awnings overhead and continues to walk forward. The camera follows the man and pans right as he walks. The wet street bustles with many pedestrians. The man continues to move forward through the crowd.", "visual_elements": [ { "content": "The man walks forward.", "type": "action", "weight": 3 }, { "content": "On the man's left is a row of flower shops.", "type": "attribute", "weight": 2 }, { "content": "Various flowers are placed under green awnings.", "type": "scene", "weight": 2 }, { "content": "The man lowers his head a little to avoid the green awnings overhead and continues to walk forward.", "type": "action", "weight": 3 }, { "content": "The camera follows the man and swings right as he walks.", "type": "camera", "weight": 3 }, { "content": "The wet street bustles with many pedestrians.", "type": "scene", "weight": 2 }, { "content": "The man continues to move forward through the crowd.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to a wider view, showing a lower angle perspective of the street. A few old cars park on the street and buildings across the street have some ladders attached to the outside walls. The man walks towards the camera, turning his head to look at his left as he walks forward. 
A green car quickly passes in front of the man, leaving behind white exhaust. The man then continues to jog over to the camera.", "visual_elements": [ { "content": "The camera switches to a wider view.", "type": "camera", "weight": 3 }, { "content": "The camera shows a lower angle perspective of the street.", "type": "camera", "weight": 3 }, { "content": "A few old cars park on the street.", "type": "attribute", "weight": 3 }, { "content": "Buildings across the street have some ladders attached to the outside walls.", "type": "attribute", "weight": 3 }, { "content": "The man walks towards the camera.", "type": "action", "weight": 3 }, { "content": "The man turns his head to look at his left as he walks forward.", "type": "action", "weight": 3 }, { "content": "A green car quickly passes in front of the man.", "type": "action", "weight": 3 }, { "content": "The man continues to jog over to the camera.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0060", "video_path": "DREAM-1K/308.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 4.25, "resolution": { "width": 1920, "height": 816 }, "caption": "The video begins with the camera focusing on a dining table with two men sitting on opposite sides of it. The man on the left of the screen wears black clothes and the man on the right of the screen wears a blue jean jacket. At the beginning of the scene, the man in black turns sideways to face the camera, holding a newspaper in his right hand and spreading his left hand out on the table. On the table is a white coffee cup and some condiments. The background showcases a street scene outside the window and raindrops on it, which obviously demonstrates that it is raining outside. Pedestrians walk on the street.\nThen, the man in black puts down the newspaper and turns to face the man in the jean jacket. Next, he leans forward a little to adjust his sitting posture. The man in black puts his right hand on the table and raises his left hand to remove the toothpick from his mouth. At the same time, the man in the jean jacket takes out a cigarette case with his right hand and takes a cigarette out with his left hand to put it in his mouth. Finally, the man in the jean jacket glances at the upper left corner.", "events": [ { "event": "The video begins with the camera focusing on a dining table with two men sitting on opposite sides of it. The man on the left of the screen wears black clothes and the man on the right of the screen wears a blue jean jacket. At the beginning of the scene, the man in black turns sideways to face the camera, holding a newspaper in his right hand and spreading his left hand out on the table. On the table is a white coffee cup and some condiments. The background showcases a street scene outside the window and raindrops on it, which obviously demonstrates that it is raining outside. 
Pedestrians walk on the street.", "visual_elements": [ { "content": "The camera focuses on a dining table.", "type": "camera", "weight": 3 }, { "content": "Two men sit on opposite sides of the table.", "type": "attribute", "weight": 2 }, { "content": "The man on the left of the screen wears black clothes.", "type": "attribute", "weight": 3 }, { "content": "The man on the right of the screen wears a blue jean jacket.", "type": "attribute", "weight": 3 }, { "content": "The man in black turns sideways to face the camera.", "type": "attribute", "weight": 3 }, { "content": "The man in black holds a newspaper in his right hand.", "type": "attribute", "weight": 2 }, { "content": "The man in black spreads his left hand out on the table.", "type": "attribute", "weight": 2 }, { "content": "On the table is a white coffee cup and some condiments.", "type": "scene", "weight": 2 }, { "content": "The background showcases a street scene outside the window and raindrops it.", "type": "scene", "weight": 2 }, { "content": "Pedestrians walk on the street.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the man in black puts down the newspaper and turns to face the man in the jean jacket. Next, he leaned forward a little to adjust his sitting posture. The man in black puts his right hand on the table and raises his left hand to remove the toothpick from his mouth. At the same time, the man in the jean jacket takes out a cigarette case with his right hand and takes a cigarette out with his left hand to put it in his mouth. Finally, the man in the jean jacket glances at the upper left corner.", "visual_elements": [ { "content": "The man in black puts down the newspaper.", "type": "action", "weight": 3 }, { "content": "The man in black turns to face the man in the jean jacket.", "type": "action", "weight": 2 }, { "content": "The man in black leans forward a little to adjust his sitting posture.", "type": "action", "weight": 1 }, { "content": "The man in black puts his right hand on the table.", "type": "action", "weight": 2 }, { "content": "The man in black raises his left hand to remove the toothpick from his mouth.", "type": "action", "weight": 3 }, { "content": "The man in the jean jacket takes out a cigarette case with his right hand.", "type": "action", "weight": 2 }, { "content": "The man in the jean jacket takes a cigarette out with his left hand to put it in his mouth.", "type": "action", "weight": 3 }, { "content": "The man in the jean jacket glances at the upper left corner.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 18 }, { "index": "TUNA_0061", "video_path": "DREAM-1K/309.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Film", "duration": 7.75, "resolution": { "width": 1824, "height": 1024 }, "caption": "The video begins with a green insect lying on one of a green leaves with shiny surfaces. The camera moves slightly upwards and the insect moves upwards on the leaf. The background is blurred and focuses mainly on the insect and the leaves.\nNext, the camera switches to a new scene showing a blue sky with a flock of birds flying in the same direction. The camera moves to the right to follow the flight path of the flock. The background is the tops of some trees.", "events": [ { "event": "The video begins with a green insect lying on one of a green leaves with shiny surfaces. The camera moves slightly upwards and the insect moves upwards on the leaf. 
The background is blurred and focuses mainly on the insect and the leaves.", "visual_elements": [ { "content": "A green insect lies on one of a green leaves.", "type": "scene", "weight": 3 }, { "content": "The green leaves have shiny surfaces.", "type": "scene", "weight": 2 }, { "content": "The camera moves slightly upwards.", "type": "camera", "weight": 3 }, { "content": "The insect moves upwards on the leaf.", "type": "action", "weight": 3 }, { "content": "The background is blurred and focuses mainly on the insect and the leaves.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera switches to a new scene showing a blue sky with a flock of birds flying in the same direction. The camera moves to the right to follow the flight path of the flock. The background is the tops of some trees.", "visual_elements": [ { "content": "The camera switches to a new scene.", "type": "camera", "weight": 3 }, { "content": "The scene shows a blue sky.", "type": "scene", "weight": 2 }, { "content": "A flock of birds is flying in the same direction.", "type": "action", "weight": 3 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The background is the tops of some trees.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0062", "video_path": "DREAM-1K/310.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 6.75, "resolution": { "width": 1920, "height": 816 }, "caption": "The video begins with the camera focusing on a window with some green plants outside. There are reflections on the glass of the window, through which the scene inside can be seen. The curtains are light coloured with some patterns on them. A woman in a dark down jacket stands by the window, with her back to the camera.\nNext, the camera moves slightly to the right, still focusing on the window. The woman turns around and begins to take off her down jacket, revealing the white uniform inside. The woman then leans forward to place the down jacket in front of her with her right hand.\nThe camera then switches to a new scene. A man wearing a brown jacket and a black cap appears in the frame. He walks out from beside a large tree. In the background are some trees, water in the distance and fences.", "events": [ { "event": "The video begins with the camera focusing on a window with some green plants outside. There are reflections on the glass of the window, through which the scene inside can be seen. The curtains are light coloured with some patterns on them. A woman in a dark down jacket stands by the window, with her back to the camera.", "visual_elements": [ { "content": "The camera focuses on a window.", "type": "camera", "weight": 3 }, { "content": "Some green plants are outside the window.", "type": "scene", "weight": 2 }, { "content": "There are reflections on the glass of the window, through which the scene inside can be seen.", "type": "scene", "weight": 1 }, { "content": "The curtains are light coloured with some patterns on them.", "type": "scene", "weight": 1 }, { "content": "A woman in a dark down jacket stands by the window.", "type": "attribute", "weight": 2 }, { "content": "The woman stands with her back to the camera.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera moves slightly to the right, still focusing on the window. The woman turns around and begins to take off her down jacket, revealing the white uniform inside. 
The woman then leans forward to place the down jacket in front of her with her right hand.", "visual_elements": [ { "content": "The camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The camera still focuses on the window.", "type": "camera", "weight": 3 }, { "content": "The woman turns around.", "type": "action", "weight": 3 }, { "content": "The woman begins to take off her down jacket.", "type": "action", "weight": 3 }, { "content": "The woman reveals the white uniform inside.", "type": "action", "weight": 3 }, { "content": "The woman leans forward.", "type": "action", "weight": 2 }, { "content": "The woman places her down jacket in front of her with her right hand.", "type": "action", "weight": 2 } ] }, { "event": "The camera then switches to a new scene. A man wearing a brown jacket and a black cap appears in the frame. He walks out from beside a large tree. In the background are some trees, water in the distance and fences.", "visual_elements": [ { "content": "The camera switches to a new scene.", "type": "camera", "weight": 3 }, { "content": "A man wears a brown jacket and a black cap.", "type": "attribute", "weight": 3 }, { "content": "A man walks out from beside a large tree.", "type": "action", "weight": 2 }, { "content": "In the background are some trees, water in the distance and fences.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0063", "video_path": "DREAM-1K/311.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 6.0, "resolution": { "width": 1888, "height": 1024 }, "caption": "The video begins with the camera focusing on a man standing in front of a wooden door. He wears a brown jacket and grey trousers and turns his head to look behind. In the background is a stone wall covered in withered vines.\nA person with curly brown hair is in the bottom left corner of the frame. This person wears a black jacket and a white undershirt. The camera shoots from behind the person, showing her head and shoulders.\nThe camera then rotates around the person to the right to capture the front of the person. The woman's face is tear-stained, with eyes full of tears. She looks ahead.\nFinally, the camera fixes on the front of the woman, showing her upper body. Her hair swings gently in the breeze. Wooden fences and grass are visible in the background. The sky is clear with clouds floating in the air.", "events": [ { "event": "The video begins with the camera focusing on a man standing in front of a wooden door. He wears a brown jacket and grey trousers and turns his head to look behind. In the background is a stone wall covered in withered vines.", "visual_elements": [ { "content": "The video begins with the camera focusing on a man standing in front of a wooden door.", "type": "camera", "weight": 3 }, { "content": "He wears a brown jacket and grey trousers.", "type": "attribute", "weight": 2 }, { "content": "The man turns his head to look behind.", "type": "action", "weight": 2 }, { "content": "In the background is a stone wall covered in withered vines.", "type": "scene", "weight": 1 } ] }, { "event": "A person with curly brown hair is in the bottom left corner of the frame. This person wears a black jacket and a white undershirt. 
The camera shoots from behind the person, showing her head and shoulders.", "visual_elements": [ { "content": "A person with curly brown hair is in the bottom left corner of the frame.", "type": "attribute", "weight": 2 }, { "content": "The person wears a black jacket and a white undershirt.", "type": "attribute", "weight": 3 }, { "content": "The camera shoots from behind the person, showing her head and shoulders.", "type": "camera", "weight": 3 } ] }, { "event": "The camera then rotates around the person to the right to capture the front of the person. The woman's face is tear-stained, with eyes full of tears. She looks ahead.", "visual_elements": [ { "content": "The camera rotates around the person to the right to capture the person's face.", "type": "camera", "weight": 3 }, { "content": "The woman's face is tear-stained, with eyes full of tears.", "type": "attribute", "weight": 3 }, { "content": "The woman looks ahead.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera fixes on the front of the woman, showing her upper body. Her hair swings gently in the breeze. Wooden fences and grass are visible in the background. The sky is clear with clouds floating in the air.", "visual_elements": [ { "content": "The camera fixes on the front of the woman, showing her upper body.", "type": "camera", "weight": 3 }, { "content": "The woman's hair swings gently in the breeze.", "type": "scene", "weight": 2 }, { "content": "Wooden fences and grass are visible in the background.", "type": "scene", "weight": 1 }, { "content": "The sky is clear with clouds floating above.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 14 }, { "index": "TUNA_0064", "video_path": "DREAM-1K/312.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 5.75, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with a man wearing a khaki uniform and a pair of sunglasses. His hair is tied back in a ponytail. His uniform has two breast pockets and a dark belt is tied around his waist. He is walking down a corridor with his left hand in his trouser pocket and his right hand playing with a long black stick. The background is a simple indoor environment with light-coloured walls.\nAs he advances, the camera gradually moves to the right. He comes to the door of a room and then stops to turn his head to look inside. The camera stops moving right and shows an iron barred door. Inside the room is a man dressed in white lying curled up on the floor with his head in his arms and his back to the camera.\nThe camera switches back to the man again. He stands in front of the iron barred door, looking down to continue to observe the situation inside the room.", "events": [ { "event": "The video begins with a man wearing a khaki uniform and a pair of sunglasses. His hair is tied back in a ponytail. His uniform has two breast pockets and a dark belt is tied around his waist. He is walking down a corridor with his left hand in his trouser pocket and his right hand playing with a long black stick. The background is a simple indoor environment with light-coloured walls.", "visual_elements": [ { "content": "A man wears a khaki uniform and a pair of sunglasses.", "type": "attribute", "weight": 3 }, { "content": "His hair is tied back in a ponytail. 
His uniform has two breast pockets and a dark belt is tied around his waist.", "type": "attribute", "weight": 2 }, { "content": "He is walking down a corridor.", "type": "action", "weight": 2 }, { "content": "He puts his left hand in his trouser pocket.", "type": "attribute", "weight": 2 }, { "content": "His right hand is playing with a long black stick.", "type": "action", "weight": 3 }, { "content": "The background is a simple indoor environment with light-coloured walls.", "type": "scene", "weight": 2 } ] }, { "event": "As he advances, the camera gradually moves to the right. He comes to the door of a room and then stops to turn his head to look inside. The camera stops moving right and shows an iron barred door. Inside the room is a man dressed in white lying curled up on the floor with his head in his arms and his back to the camera.", "visual_elements": [ { "content": "The camera gradually moves to the right.", "type": "camera", "weight": 3 }, { "content": "The man comes to the door of a room.", "type": "action", "weight": 2 }, { "content": "The man stops.", "type": "action", "weight": 2 }, { "content": "The man turns his head to look inside.", "type": "action", "weight": 2 }, { "content": "The camera stops moving right.", "type": "camera", "weight": 3 }, { "content": "The camera shows an iron barred door.", "type": "scene", "weight": 3 }, { "content": "Behind the door is a man dressed in white lying curled up on the floor with his head in his arms and his back to the camera.", "type": "scene", "weight": 3 } ] }, { "event": "The camera switches back to the man again. He stands in front of the iron barred door, looking down to continue to observe the situation inside the room.", "visual_elements": [ { "content": "The camera switches back to the man again.", "type": "camera", "weight": 3 }, { "content": "The man stands in front of the iron barred door.", "type": "attribute", "weight": 2 }, { "content": "The man looks down to continue to observe the situation inside the room.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0065", "video_path": "DREAM-1K/313.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 3.88, "resolution": { "width": 1840, "height": 1024 }, "caption": "The video begins with the camera focusing on a young man wearing black wrestling ear protectors and a black and yellow sports vest. His right hand is held high in the air by a referee. The referee wears a black and white striped referee uniform with red wrist guards. The audience can be seen in the background, who are dressed in various colours, mainly in red.\nThe camera moves right and upwards as the young man's hand is put down. The young man then lowers his head and walks towards his front right.\nThe camera switches to the audience, where several spectators are sitting on the steps, cheering on the competitors. On the left of the frame is a middle-aged man wearing a yellow shirt. On the right of the frame are several young women, one holding a yellow cheerleading ball, another holding a large hand-shaped cheering prop. The crowd are applauding and cheering for the competitors while the yellow cheerleading ball is waving in the air.", "events": [ { "event": "The video begins with the camera focusing on a young man wearing black wrestling ear protectors and a black and yellow sports vest. His right hand is held high in the air by a referee. 
The referee wears a black and white striped referee uniform with red wrist guards. The audience can be seen in the background, who are dressed in various colours, mainly in red.", "visual_elements": [ { "content": "The camera focuses on a young man.", "type": "camera", "weight": 3 }, { "content": "A young man wears black wrestling ear protectors and a black and yellow sports vest.", "type": "attribute", "weight": 3 }, { "content": "The young man's right hand is held high in the air by a referee.", "type": "attribute", "weight": 3 }, { "content": "The referee wears a black and white striped referee uniform with red wrist guards.", "type": "attribute", "weight": 2 }, { "content": "The audience can be seen in the background, who are dressed in various colours, mainly in red.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves right and upwards as the young man's hand is put down. The young man then lowers his head and walks towards his front right.", "visual_elements": [ { "content": "The camera moves right and upwards.", "type": "camera", "weight": 3 }, { "content": "The young man's hand is put down.", "type": "action", "weight": 2 }, { "content": "The young man lowers his head and walks towards his front right.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to the audience, where several spectators are sitting on the steps, cheering on the competitors. On the left of the frame is a middle-aged man wearing a yellow shirt. On the right of the frame are several young women, one holding a yellow cheerleading ball, another holding a large hand-shaped cheering prop. The crowd are applauding and cheering for the competitors while the yellow cheerleading ball is waving in the air.", "visual_elements": [ { "content": "The camera switches to the audience.", "type": "camera", "weight": 3 }, { "content": "Several spectators are sitting on the steps, cheering on the competitors.", "type": "scene", "weight": 3 }, { "content": "On the left of the frame is a middle-aged man wearing a yellow shirt.", "type": "scene", "weight": 1 }, { "content": "On the right of the frame are several young women.", "type": "scene", "weight": 2 }, { "content": "One of them is holding a yellow cheerleading ball.", "type": "scene", "weight": 2 }, { "content": "Another of them is holding a large hand-shaped cheering prop.", "type": "scene", "weight": 2 }, { "content": "The crowd are applauding and cheering for the competitors.", "type": "scene", "weight": 2 }, { "content": "The yellow cheerleading ball is waving in the air.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0066", "video_path": "DREAM-1K/314.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 9.75, "resolution": { "width": 1904, "height": 1024 }, "caption": "The video begins with the camera focusing on a bed with white sheets and pillows. A blonde woman wearing a blue long-sleeved blouse lies on the bed, with her hair scattered on the bed and her arms resting naturally beside her head. A bedside table and a few items can be seen in the background. The woman then turns her head and looks up at the ceiling above her.\nThe camera then switches to another part of the room where two paintings hang on the wall and the city view can be seen from the window. Inside the room is a white chair and a small table with some books and items on it.\nThe camera shakes slightly. 
Then a hand holding the bottom of a pink ornament enters the frame from the bottom of it, where the ornament is lifted up. The owner of the hand also enters the frame. She looks upwards with the pink ornament in her hand as she continues to adjust its position.", "events": [ { "event": "The video begins with the camera focusing on a bed with white sheets and pillows. A blonde woman wearing a blue long-sleeved blouse lies on the bed,with her hair scattered on the bed and her arms resting naturally beside her head. A bedside table and a few items can be seen in the background. The woman then turns her head and looks up at the ceiling above her.", "visual_elements": [ { "content": "The camera focuses on a bed.", "type": "camera", "weight": 3 }, { "content": "The bed has white sheets and pillows.", "type": "scene", "weight": 1 }, { "content": "A blonde woman wearing a blue long-sleeved jacket lies on the bed.", "type": "scene", "weight": 3 }, { "content": "The woman's hair is scattered on the bed.", "type": "attribute", "weight": 1 }, { "content": "The woman's arms rest naturally beside her head.", "type": "attribute", "weight": 2 }, { "content": "A bedside table and a few items can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "The woman turns her head.", "type": "action", "weight": 2 }, { "content": "The woman looks up at the ceiling above her.", "type": "action", "weight": 2 } ] }, { "event": "The camera then switches to another part of the room where two paintings hang on the wall and the city view can be seen from the window. Inside the room is a white chair and a small table with some books and items on it.", "visual_elements": [ { "content": "The camera switches to another part of the room.", "type": "camera", "weight": 3 }, { "content": "Two paintings hang on the wall.", "type": "scene", "weight": 1 }, { "content": "The city view can be seen from the window.", "type": "scene", "weight": 1 }, { "content": "Inside the room is a white chair and a small table.", "type": "scene", "weight": 1 }, { "content": "Some books and items are on the table.", "type": "scene", "weight": 1 } ] }, { "event": "The camera shakes slightly. Then a hand holding the bottom of a pink ornament enters the frame from the bottom of it, where the ornament is lifted up. The owner of the hand also enters the frame. She looks upwards with the pink ornament in her hand as she continues to adjust its position.", "visual_elements": [ { "content": "The camera shakes slightly.", "type": "camera", "weight": 3 }, { "content": "A hand holding the bottom of a pink ornament enters the frame from the bottom of it.", "type": "action", "weight": 3 }, { "content": "The ornament is lifted up by the hand.", "type": "action", "weight": 3 }, { "content": "The owner of the hand also enters the frame.", "type": "action", "weight": 3 }, { "content": "The woman looks upwards.", "type": "action", "weight": 3 }, { "content": "The woman holds the pink ornament in her hand as she continues to adjust its position.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0067", "video_path": "DREAM-1K/315.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 5.62, "resolution": { "width": 1904, "height": 1024 }, "caption": "The video begins with a man and a woman in the frame. The man wears a black blouse and the woman lies on her side with her back to the camera. Her hair is blonde. 
The background of the room is dim, with a table lamp on the right side giving off soft lights and some items on a nearby table.\nThe man leans over on the right side of the woman. He grabs the corner of the quilt with his right hand to pull it up. He then grabs the quilt down and pulls it in his direction to ensure that it is tight over the woman. Finally, the man presses a corner of the quilt under his hand with both hands and straightens up slightly before leaning over to gaze at the woman again.", "events": [ { "event": "The video begins with a man and a woman in the frame. The man wears a black blouse and the woman lies on her side with her back to the camera. Her hair is blonde. The background of the room is dim, with a table lamp on the right side giving off soft lights and some items on a nearby table.", "visual_elements": [ { "content": "A man and a woman are in the frame.", "type": "scene", "weight": 3 }, { "content": "The man wears a black jacket.", "type": "attribute", "weight": 2 }, { "content": "The woman lies on her side with her back to the camera.", "type": "attribute", "weight": 3 }, { "content": "The woman's hair is blonde.", "type": "attribute", "weight": 2 }, { "content": "The background of the room is dim.", "type": "scene", "weight": 1 }, { "content": "A table lamp on the right side gives off soft lights.", "type": "scene", "weight": 2 }, { "content": "Some items are on a nearby table.", "type": "scene", "weight": 1 } ] }, { "event": "The man leans over on the right side of the woman. He grabs the corner of the quilt with his right hand to pull it up. He then grabs the quilt down and pulls it in his direction to ensure that it is tight over the woman. Finally, the man presses a corner of the quilt under his hand with both hands and straightens up slightly before leaning over to gaze at the woman again.", "visual_elements": [ { "content": "The man leans over on the right side of the woman.", "type": "attribute", "weight": 2 }, { "content": "The man grabs the corner of the quilt with his right hand.", "type": "action", "weight": 2 }, { "content": "The man pulls the quilt up.", "type": "action", "weight": 2 }, { "content": "He grabs the quilt down and pulls it in his direction.", "type": "action", "weight": 2 }, { "content": "The man ensures that the quilt is tight over the woman.", "type": "action", "weight": 2 }, { "content": "The man presses a corner of the quilt under his hand with both hands.", "type": "action", "weight": 2 }, { "content": "The man straightens up slightly before leaning over to gaze at the woman again.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0068", "video_path": "DREAM-1K/316.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 5.0, "resolution": { "width": 1904, "height": 1024 }, "caption": "The video begins with two figures in the frame, a man and a woman. The man wears a black blouse and the woman is in a dark red blouse. The man lies face down on the bed with his left hand supporting his body and his right hand pouring wine from a wine bottle into a glass in the woman's hand. The woman sits on the floor, leaning against the bed. She puts her left hand on the bed and her right hand rests on her left hand. She smiles and looks ahead. The background is a dim room with curtains drawn and soft lights.\nThe woman then picks up the glass with her right hand to drink and puts it down after taking a sip. 
After pouring the wine, the man holds out the bottle with his left hand and places his right hand on the neck of the bottle. The man then withdraws his left hand and takes the bottle in his right hand to put it on the ground in front right of him. At the same time, the man rises up on his toes. Finally, the man withdraws his right hand and raises it up with his feet down. He then looks ahead.", "events": [ { "event": "The video begins with two figures in the frame, a man and a woman. The man wears a black blouse and the woman is in a dark red blouse. The man lies face down on the bed with his left hand supporting his body and his right hand pouring wine from a wine bottle into a glass in the woman's hand. The woman sits on the floor, leaning against the bed. She puts her left hand on the bed and her right hand rests on her left hand. She smiles and looks ahead. The background is a dim room with curtains drawn and soft lights.", "visual_elements": [ { "content": "Two figures are in the frame, a man and a woman.", "type": "scene", "weight": 3 }, { "content": "The man wears a black blouse.", "type": "attribute", "weight": 2 }, { "content": "The woman is in a dark red blouse.", "type": "attribute", "weight": 2 }, { "content": "The man lies face down on the bed.", "type": "attribute", "weight": 3 }, { "content": "The man's left hand supports his body.", "type": "attribute", "weight": 2 }, { "content": "The man's right hand is pouring wine from a wine bottle into a glass in the woman's hand.", "type": "action", "weight": 3 }, { "content": "The woman sits on the floor.", "type": "attribute", "weight": 2 }, { "content": "The woman leans against the bed.", "type": "attribute", "weight": 2 }, { "content": "She puts her left hand on the bed.", "type": "attribute", "weight": 2 }, { "content": "The woman's right hand rests on her left hand.", "type": "attribute", "weight": 2 }, { "content": "The woman smiles and looks ahead.", "type": "action", "weight": 2 }, { "content": "The background is a dim room with curtains drawn and soft lights.", "type": "scene", "weight": 1 } ] }, { "event": "The woman then picks up the glass with her right hand to drink and puts it down after taking a sip. After pouring the wine, the man holds out the bottle with his left hand and places his right hand on the neck of the bottle. The man then withdraws his left hand and takes the bottle in his right hand to put it on the ground in front right of him. At the same time, the man rises up on his toes. Finally, the man withdraws his right hand and raises it up with his feet down. 
He then looks ahead.", "visual_elements": [ { "content": "The woman then picks up the glass with her right hand to drink.", "type": "action", "weight": 3 }, { "content": "The woman puts the glass down after taking a sip.", "type": "action", "weight": 3 }, { "content": "After pouring the wine, the man holds out the bottle.", "type": "action", "weight": 3 }, { "content": "The man places his right hand on the neck of the bottle.", "type": "attribute", "weight": 2 }, { "content": "The man then withdraws his left hand and takes the bottle in his right hand to put it on the ground in front right of him.", "type": "action", "weight": 3 }, { "content": "At the same time, the man rises up on his toes.", "type": "action", "weight": 2 }, { "content": "Finally, the man withdraws his right hand and raises it up with his feet down.", "type": "action", "weight": 2 }, { "content": "The man then looks ahead.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 20 }, { "index": "TUNA_0069", "video_path": "DREAM-1K/317.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 11.0, "resolution": { "width": 1824, "height": 1024 }, "caption": "The video begins with the camera focusing on a serene forest, with sunlight filtered onto the ground through the dense foliage, creating dappled light. In the centre of the frame is a clear stream with slowly flowing water which forms several small waterfalls. The waterfalls gently lap against the rocks. Tall trees and dense vegetation can be seen in the background. The sunlight shines on the water through the gaps in the leaves, creating twinkling points of light. As the camera slowly moves forward, the trees and vegetation in the background are gradually revealed to be more dense and vibrant.\nNext, the camera switches to an overhead view showing a floor covered in paving stones. On both sides of the frame are two large stone pillars. In the centre of the frame is a monk with a shaven head and wearing orange robes. The monk holds a flower and walks slowly from the bottom of the frame to the top of the frame. The shadows of the stone pillars and the monk are cast on the ground. The monk is followed by another two monks in the same costume.\nThe camera moves slowly upwards as the three monks walk in unison towards their front.", "events": [ { "event": "The video begins with the camera focusing on a serene forest, with sunlight filtered onto the ground through the dense foliage, creating dappled light. In the centre of the frame is a clear stream with slowly flowing water which forms several small waterfalls. The waterfalls gently lap against the rocks. Tall trees and dense vegetation can be seen in the background. The sunlight shines on the water through the gaps in the leaves, creating twinkling points of light. 
As the camera slowly moves forward, the trees and vegetation in the background are gradually revealed to be more dense and vibrant.", "visual_elements": [ { "content": "The camera focuses on a serene forest.", "type": "camera", "weight": 3 }, { "content": "The sunlight is filtered onto the ground through the dense foliage, creating dappled light.", "type": "scene", "weight": 2 }, { "content": "In the centre of the frame is a clear stream.", "type": "scene", "weight": 3 }, { "content": "The water slowly flows.", "type": "scene", "weight": 3 }, { "content": "The water forms several small waterfalls.", "type": "scene", "weight": 3 }, { "content": "The waterfalls gently lap against the rocks.", "type": "scene", "weight": 2 }, { "content": "Tall trees and dense vegetation can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "The sunlight shines on the water through the gaps in the leaves, creating twinkling points of light.", "type": "scene", "weight": 2 }, { "content": "The camera slowly moves forward.", "type": "camera", "weight": 3 }, { "content": "The trees and vegetation in the background are gradually revealed to be more dense and vibrant.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera switches to an overhead view showing a floor covered in paving stones. On both sides of the frame are two large stone pillars. In the centre of the frame is a monk with a shaven head and wearing orange robes. The monk holds a flower and walks slowly from the bottom of the frame to the top of the frame. The shadows of the stone pillars and the monk are cast on the ground. The monk is followed by another two monks in the same costume.", "visual_elements": [ { "content": "The camera switches to an overhead view.", "type": "camera", "weight": 3 }, { "content": "The camera shows a floor covered in paving stones.", "type": "attribute", "weight": 3 }, { "content": "On both sides of the frame are two large stone pillars.", "type": "attribute", "weight": 2 }, { "content": "In the centre of the frame is a monk with a shaven head and wearing orange robes.", "type": "attribute", "weight": 3 }, { "content": "The monk holds a flower and walks slowly from the bottom of the frame to the top of the frame.", "type": "action", "weight": 3 }, { "content": "The shadows of the stone pillars and the monk are cast on the ground.", "type": "attribute", "weight": 2 }, { "content": "The monk is followed by another two monks in the same costume.", "type": "action", "weight": 3 } ] }, { "event": "The camera moves slowly upwards as the three monks walk in unison towards their front.", "visual_elements": [ { "content": "The camera moves slowly upwards.", "type": "camera", "weight": 3 }, { "content": "The three monks walk in unison towards their front.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0070", "video_path": "DREAM-1K/318.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 11.5, "resolution": { "width": 1920, "height": 832 }, "caption": "The video begins with a dark night sky as the background. The camera moves to the left where a man wearing blue clothes and a red hat frowns and looks ahead as he stands on the shore. Some buildings and waves lapping against the shore can be seen in the background.\nThe camera switches to show the man standing facing the camera on a pier with two streetlights on the right of the pier. The background is a dark blue night sky. A small aeroplane flies from the distance opposite the man, gradually approaching the pier. The man stands at the end of the pier, looking up at the flying plane.\nThe man turns to face the sea as the plane flies overhead. The man takes a few steps forward and watches the plane fly away. The man then lifts his left hand to wipe his face while turning to face the camera and walking forward. A streetlight beside him swings.", "events": [ { "event": "The video begins with a dark night sky as the background. The camera moves to the left where a man wearing blue clothes and a red hat frowns and looks ahead as he stands on the shore. Some buildings and waves lapping against the shore can be seen in the background.", "visual_elements": [ { "content": "A dark night sky is the background.", "type": "scene", "weight": 2 }, { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "A man wears blue clothes and a red hat.", "type": "attribute", "weight": 3 }, { "content": "The man frowns and looks ahead.", "type": "attribute", "weight": 2 }, { "content": "The man stands on the shore.", "type": "attribute", "weight": 1 }, { "content": "Some buildings and waves lapping against the shore can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches to show the man standing facing the camera on a pier with two streetlights on the right of the pier. The background is a dark blue night sky. 
A small aeroplane flies from the distance opposite the man, gradually approaching the pier. The man stands at the end of the pier, looking up at the flying plane.", "visual_elements": [ { "content": "The camera switches.", "type": "camera", "weight": 3 }, { "content": "The man stands facing the camera on a pier.", "type": "attribute", "weight": 2 }, { "content": "Two streetlights are on the right of the pier.", "type": "scene", "weight": 2 }, { "content": "The background is a dark blue night sky.", "type": "scene", "weight": 1 }, { "content": "A small aeroplane flies from the distance opposite the man.", "type": "action", "weight": 3 }, { "content": "The plane approaches the pier.", "type": "action", "weight": 3 }, { "content": "The man stands at the end of the pier.", "type": "action", "weight": 2 }, { "content": "The man looks up at the flying plane.", "type": "action", "weight": 2 } ] }, { "event": "The man turns to face the sea as the plane flies overhead. The man takes a few steps forward and watches the plane fly away. The man then lifts his left hand to wipe his face while turning to face the camera and walking forward. 
A streetlight beside him swings.", "visual_elements": [ { "content": "The man turns to face the sea.", "type": "scene", "weight": 2 }, { "content": "The plane flies overhead.", "type": "action", "weight": 3 }, { "content": "The man takes a few steps forward.", "type": "action", "weight": 3 }, { "content": "The man watches the plane fly away.", "type": "action", "weight": 2 }, { "content": "The man lifts his left hand to wipe his face.", "type": "action", "weight": 2 }, { "content": "The man turns to face the camera.", "type": "action", "weight": 3 }, { "content": "The man walks forward.", "type": "action", "weight": 3 }, { "content": "A streetlight beside him swings.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0071", "video_path": "DREAM-1K/319.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 10.75, "resolution": { "width": 1920, "height": 832 }, "caption": "The video begins with the camera focusing on the deck of a ship. A man in a light blue uniform with dark blue stripes kneels down on one knee on the deck. A black holster is attached to his right leg. The man wears white socks and white sneakers with blue stripes. He draws on the deck with a piece of chalk in his right hand. To the man's left rear, a black dog lies quietly on the deck with a leash tied around its neck.\nThe camera moves upwards. The man gets up and walks towards a large mechanical equipment behind him. The equipment has two huge rollers and some complex mechanical structure. The man walks over to the equipment and rests his left hand on it. The man climbs on the equipment and then stands in the centre of it. The man crosses his arms over his chest and speaks facing the camera. The background is a wide expanse of sea and sky. The man's red hat stands out in the sunlight.", "events": [ { "event": "The video begins with the camera focusing on the deck of a ship. A man in a light blue uniform with dark blue stripes kneels down on one knee on the deck. A black holster is attached to his right leg. The man wears white socks and white sneakers with blue stripes. He draws on the deck with a piece of chalk in his right hand. To the man's left rear, a black dog lies quietly on the deck with a leash tied around its neck.", "visual_elements": [ { "content": "The video begins with the camera focusing on the deck of a ship.", "type": "camera", "weight": 3 }, { "content": "A man in a light blue uniform with dark blue stripes kneels down on one knee on the deck.", "type": "attribute", "weight": 3 }, { "content": "A black holster is attached to the man's right leg.", "type": "attribute", "weight": 2 }, { "content": "The man wears white socks and white sneakers with blue stripes.", "type": "attribute", "weight": 2 }, { "content": "He draws on the deck with a piece of chalk in his right hand.", "type": "action", "weight": 3 }, { "content": "To the man's left rear, a black dog lies quietly on the deck.", "type": "scene", "weight": 2 }, { "content": "A leash is tied around the dog's neck.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves upwards. The man gets up and walks towards a large mechanical equipment behind him. The equipment has two huge rollers and some complex mechanical structure. The man walks over to the equipment and rests his left hand on it. The man climbs on the equipment and then stands in the centre of it. The man crosses his arms over his chest and speaks facing the camera. 
The background is a wide expanse of sea and sky. The man's red hat stands out in the sunlight.", "visual_elements": [ { "content": "The camera moves upwards.", "type": "camera", "weight": 3 }, { "content": "The man gets up and walks towards a large mechanical equipment behind him.", "type": "action", "weight": 3 }, { "content": "The equipment has two huge rollers and some complex mechanical structure.", "type": "action", "weight": 2 }, { "content": "The man walks over to the equipment.", "type": "action", "weight": 2 }, { "content": "The man rests his left hand on the equipment.", "type": "action", "weight": 2 }, { "content": "The man climbs on the equipment.", "type": "action", "weight": 3 }, { "content": "The man stands in the centre of the equipment.", "type": "attribute", "weight": 2 }, { "content": "The man crosses his arms over his chest.", "type": "attribute", "weight": 2 }, { "content": "The man speaks facing the camera.", "type": "action", "weight": 3 }, { "content": "The background is a wide expanse of sea and sky.", "type": "scene", "weight": 2 }, { "content": "The man's red hat stands out in the sunlight.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 18 }, { "index": "TUNA_0072", "video_path": "DREAM-1K/320.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 10.25, "resolution": { "width": 1920, "height": 832 }, "caption": "The video begins with the camera focusing on a man wearing a black suit and a red woolen hat. He stands on an iron bridge over a pond, holding a fish in his left hand. The man walks forward facing the camera and occasionally holds onto the guardrail on his right. He steps off the iron bridge and turns left. The background is an open sky and some buildings with white fabrics hanging from them. Some antennas and satellite receivers are alongside the buildings.\nThe camera switches to the man standing on a ladder. He is still carrying the fish in his hand. The background is an open ocean with a lighthouse in the distance, whose structures are clearly visible. The man stands at the top of the ladder and holds the fish up in the air with his left hand.\nSuddenly, a huge black killer whale leaps out of the water and opens its mouth wide. The killer whale bites away the fish. The man puts his left hand down.\nFinally, the camera switches to an indoor scene with an old-fashioned alarm clock showing the time being 2:03. The window blinds are open, through which you can see trees and some faint lights outside. The camera shakes slightly.", "events": [ { "event": "The video begins with the camera focusing on a man wearing a black suit and a red woolen hat. He stands on an iron bridge over a pond, holding a fish in his left hand. The man walks forward facing the camera and occasionally holds onto the guardrail on his right. He steps off the iron bridge and turns left. The background is an open sky and some buildings with white fabrics hanging from them. 
Some antennas and satellite receivers are alongside the buildings.", "visual_elements": [ { "content": "The camera focuses on a man.", "type": "camera", "weight": 3 }, { "content": "The man wears a black suit and a red woolen hat.", "type": "attribute", "weight": 3 }, { "content": "He stands on an iron bridge over a pond.", "type": "attribute", "weight": 2 }, { "content": "The man holds a fish in his left hand.", "type": "attribute", "weight": 3 }, { "content": "The man walks forward facing the camera and occasionally holds onto the guardrail on his right.", "type": "action", "weight": 3 }, { "content": "The man steps off the iron bridge and turns left.", "type": "action", "weight": 3 }, { "content": "The background is an open sky and some buildings.", "type": "scene", "weight": 1 }, { "content": "White fabrics hang from the buildings.", "type": "scene", "weight": 1 }, { "content": "Some antennas and satellite receivers are alongside the buildings.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches to the man standing on a ladder. He is still carrying the fish in his hand. The background is an open ocean with a lighthouse in the distance, whose structures are clearly visible. The man stands at the top of the ladder and holds the fish up in the air with his left hand.", "visual_elements": [ { "content": "The camera switches to the man standing on a ladder.", "type": "camera", "weight": 3 }, { "content": "He is still carrying the fish in his hand.", "type": "attribute", "weight": 2 }, { "content": "The background is an open ocean.", "type": "scene", "weight": 1 }, { "content": "The man stands at the top of the ladder.", "type": "attribute", "weight": 3 }, { "content": "The man holds the fish up in the air with his left hand.", "type": "action", "weight": 3 } ] }, { "event": "Suddenly, a huge black killer whale leaps out of the water and opens its mouth wide. The killer whale bites away the fish. The man puts his left hand down.", "visual_elements": [ { "content": "A huge black killer whale leaps out of the water.", "type": "action", "weight": 3 }, { "content": "The killer whale opens its mouth wide.", "type": "action", "weight": 3 }, { "content": "The killer whale bites away the fish.", "type": "action", "weight": 3 }, { "content": "The man puts his left hand down.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera switches to an indoor scene with an old-fashioned alarm clock showing the time being 2:03. The window blinds are open, through which you can see trees and some faint lights outside. 
The camera shakes slightly.", "visual_elements": [ { "content": "The camera switches to an indoor scene.", "type": "camera", "weight": 3 }, { "content": "An old-fashioned alarm clock is in the scene.", "type": "attribute", "weight": 2 }, { "content": "The time is 2:03.", "type": "attribute", "weight": 1 }, { "content": "The window blinds are open.", "type": "attribute", "weight": 1 }, { "content": "Through the window you can see trees and some faint lights outside.", "type": "scene", "weight": 1 }, { "content": "The camera shakes slightly.", "type": "camera", "weight": 3 } ] } ], "n_events": 4, "n_elements": 24 }, { "index": "TUNA_0073", "video_path": "DREAM-1K/321.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 4.25, "resolution": { "width": 1920, "height": 832 }, "caption": "The video begins with the camera focusing on a small room with a bed on the left, which is equipped with white cabinets on both sides. A white bag with black stripes hangs from the cabinet near the camera. The bag has the words \"Air Kentucky\" on it. In the centre of the room, the desk and cabinet are placed against the back wall. On the right side of the room, a sink and a mirror are placed against the right wall. Inside the room is a man and a woman sitting on the bed.\nThe man near the camera wears a blue uniform with a red cap and white sneakers on his feet. The woman wears a green jacket, khaki trousers and black boots on her feet. Crossing his arms over his lap, the man looks down at the front and moves his fingers. The woman holds two items in both hands and turns her head to look at the man. The two are talking to each other.\nNext, the camera switches to a corridor. The woman walks towards the camera. She wears a bandage on her left hand and holds a brown bag in her right hand. The corridor has light coloured walls, multiple round lights at the top and a grey floor. There is a fire extinguisher and a chair on the left side of the corridor and multiple room doors on the right side. At the end of the corridor an elderly man in a blue uniform climbs down the stairs and walks towards the woman.", "events": [ { "event": "The video begins with the camera focusing on a small room with a bed on the left, which is equipped with white cabinets on both sides. A white bag with black stripes hangs from the cabinet near the camera. The bag has the words \"Air Kentucky\" on it. In the centre of the room, the desk and cabinet are placed against the back wall. On the right side of the room, a sink and a mirror are placed against the right wall. 
Inside the room is a man and a woman sitting on the bed.", "visual_elements": [ { "content": "The camera focuses on a small room.", "type": "camera", "weight": 3 }, { "content": "A bed is on the left side of the room.", "type": "attribute", "weight": 2 }, { "content": "The bed is equipped with white cabinets on both sides.", "type": "attribute", "weight": 1 }, { "content": "A white bag with black stripes hangs from the cabinet near the camera.", "type": "attribute", "weight": 2 }, { "content": "The bag has the words \"Air Kentucky\" on it.", "type": "attribute", "weight": 2 }, { "content": "In the centre of the room, the desk and cabinet are placed against the back wall.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the room, a sink and a mirror are placed against the right wall.", "type": "attribute", "weight": 2 }, { "content": "Inside the room is a man and a woman sitting on the bed.", "type": "attribute", "weight": 3 } ] }, { "event": "The man near the camera wears a blue uniform with a red cap and white sneakers on his feet. The woman wears a green jacket, khaki trousers and black boots on her feet. Crossing his arms over his lap, the man looks down at the front and moves his fingers. The woman holds two items in both hands and turns her head to look at the man. The two are talking to each other.", "visual_elements": [ { "content": "The man near the camera wears a blue uniform with a red cap and white sneakers on his feet.", "type": "attribute", "weight": 2 }, { "content": "The woman wears a green jacket, khaki trousers and black boots on her feet.", "type": "attribute", "weight": 2 }, { "content": "Crossing his arms over his lap, the man looks down at the front.", "type": "attribute", "weight": 2 }, { "content": "The man moves his fingers.", "type": "action", "weight": 3 }, { "content": "The woman holds two items in both hands and turns her head to look at the man.", "type": "action", "weight": 3 }, { "content": "The two are talking to each other.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to a corridor. The woman walks towards the camera. She wears a bandage on her left hand and holds a brown bag in her right hand. The corridor has light coloured walls, multiple round lights at the top and a grey floor. There is a fire extinguisher and a chair on the left side of the corridor and multiple room doors on the right side. 
At the end of the corridor an elderly man in blue uniform climbs down the stairs and walks towards the woman.", "visual_elements": [ { "content": "The camera switches to a corridor.", "type": "camera", "weight": 3 }, { "content": "The woman walks towards the camera.", "type": "action", "weight": 3 }, { "content": "The woman wears a bandage on her left hand.", "type": "attribute", "weight": 2 }, { "content": "The woman holds a brown bag in her right hand.", "type": "attribute", "weight": 2 }, { "content": "The corridor has light coloured walls, multiple round lights at the top and a grey floor.", "type": "scene", "weight": 1 }, { "content": "There is a fire extinguisher and a chair on the left side of the corridor and multiple room doors on the right side.", "type": "scene", "weight": 1 }, { "content": "At the end of the corridor an elderly man in a blue uniform climbs down the stairs.", "type": "action", "weight": 3 }, { "content": "The elderly man walks towards the woman.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0074", "video_path": "DREAM-1K/322.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 4.5, "resolution": { "width": 1920, "height": 832 }, "caption": "The video begins with the camera focusing on an indoor scene. The background is a simply decorated room and a painting of a voyage hanging on the wall to the right of the frame. At the back of the room is a large cylindrical metal device. On the left side of the room is a bar behind which stands a man wearing a red hat and a black suit with his head bowed. Next to the bar sits a man wearing a red hat and a black suit. The man sitting by the bar holds a glass of wine in both hands. He then puts down the glass with his left hand and puts his left hand on his hip. A table lamp on the bar shines soft lights.\nOn a sofa in the middle of the room sits a white-haired man in a black suit. The white-haired man puts his hands on his knees and leans forward to look at the camera as he speaks. Behind the white-haired man stands a long table, lying across the room. On the right side of the long table sits a woman with long hair. The long-haired woman faces the camera with a cigarette in her right hand as she looks down at something on the tabletop. A young man half crouches in front of the cylindrical device which is behind the woman. The young man has curly hair and wears white clothes. The left side of the young man's body faces the camera. A bearded man in uniform stands front right of the young man. The young man and the long bearded man are communicating and passing tools. The long bearded man is repairing a light on the wall.\nSuddenly, the lights in the room go out and the entire scene plunges into darkness. A few seconds later, the light on the left side of the screen flickers. The two men at the bar look up.", "events": [ { "event": "The video begins with the camera focusing on an indoor scene. The background is a simply decorated room and a painting of a voyage hanging on the wall to the right of the frame. At the back of the room is a large cylindrical metal device. On the left side of the room is a bar behind which stands a man wearing a red hat and a black suit with his head bowed. Next to the bar sits a man wearing a red hat and a black suit. The man sitting by the bar holds a glass of wine in both hands. He then puts down the glass with his left hand and puts his left hand on his hip. 
A table lamp on the bar shines soft lights.", "visual_elements": [ { "content": "The camera focuses on an indoor scene.", "type": "camera", "weight": 3 }, { "content": "The background is a simply decorated room.", "type": "scene", "weight": 1 }, { "content": "A painting of a voyage hangs on the wall to the right of the frame.", "type": "scene", "weight": 1 }, { "content": "At the back of the room is a large cylindrical metal device.", "type": "scene", "weight": 2 }, { "content": "On the left side of the room is a bar.", "type": "scene", "weight": 2 }, { "content": "Behind the bar stands a man.", "type": "scene", "weight": 2 }, { "content": "The standing man wears a red hat and a black suit", "type": "attribute", "weight": 2 }, { "content": "The man standing behind the bar bows his head.", "type": "attribute", "weight": 1 }, { "content": "Next to the bar sits a man wearing a red hat and a black suit.", "type": "attribute", "weight": 2 }, { "content": "The man sitting by the bar holds a glass of wine in both hands.", "type": "attribute", "weight": 2 }, { "content": "The man sitting by the bar puts down the glass with his left hand.", "type": "action", "weight": 1 }, { "content": "The man sitting by the bar puts his left hand on his hip.", "type": "action", "weight": 1 }, { "content": "A table lamp on the bar shines soft lights.", "type": "scene", "weight": 1 } ] }, { "event": "On a sofa in the middle of the room sits a white-haired man in a black suit. The white-haired man puts his hands on his knees and leans forward to look at the camera as he speaks. Behind the white-haired man stands a long table, lying across the room. On the right side of the long table sits a woman with long hair. The long-haired woman faces the camera with a cigarette in her right hand as she looks down at something on the tabletop. A young man half crouches in front of the cylindrical device which is behind the woman. The young man has curly hair and wears white clothes. The left side of the young man's body faces the camera. A bearded man in uniform stands front right of the young man. The young man and the long bearded man are communicating and passing tools. 
The long bearded man is repairing a light on the wall.", "visual_elements": [ { "content": "On a sofa in the middle of the room sits a white-haired man.", "type": "attribute", "weight": 3 }, { "content": "The white-haired man is in a black suit.", "type": "attribute", "weight": 2 }, { "content": "The white-haired man puts his hands on his knees.", "type": "attribute", "weight": 2 }, { "content": "The white-haired man leans forward.", "type": "attribute", "weight": 2 }, { "content": "The white-haired man looks at the camera as he speaks.", "type": "action", "weight": 3 }, { "content": "Behind the white-haired man stands a long table, lying across the room.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the long table sits a woman with long hair.", "type": "attribute", "weight": 3 }, { "content": "The long-haired woman faces the camera with a cigarette in her right hand as she looks down at something on the tabletop.", "type": "attribute", "weight": 3 }, { "content": "A young man half crouches in front of the cylindrical device which is behind the woman.", "type": "attribute", "weight": 3 }, { "content": "The young man has curly hair and wears white clothes.", "type": "attribute", "weight": 2 }, { "content": "The left side of the young man's body faces the camera.", "type": "attribute", "weight": 2 }, { "content": "A bearded man in uniform stands front right of the young man.", "type": "attribute", "weight": 3 }, { "content": "The young man and the long bearded man are communicating and passing tools.", "type": "action", "weight": 3 }, { "content": "The long bearded man is repairing a light on the wall.", "type": "action", "weight": 2 } ] }, { "event": "Suddenly, the lights in the room go out and the entire scene plunges into darkness. A few seconds later, the light on the left side of the screen flickers. The two men at the bar look up.", "visual_elements": [ { "content": "The lights in the room go out.", "type": "scene", "weight": 3 }, { "content": "The entire scene plunges into darkness.", "type": "scene", "weight": 3 }, { "content": "The light on the left side of the screen flickers.", "type": "scene", "weight": 3 }, { "content": "The two men at the bar look up.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 31 }, { "index": "TUNA_0075", "video_path": "DREAM-1K/323.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 6.0, "resolution": { "width": 1920, "height": 960 }, "caption": "The video begins with the camera focusing on a woman whose hair on the right side has been shaved. She stands in a room with a closed door and towels hanging on the wall in the background. She wears in a black halter top and a bracelet on her hand. She holds an electric razor in her right hand and puts her left hand on her head.\nNext, the woman begins to shave her head with the razor. The razor moves on the right side of her scalp. The woman turns to the right as she shaves her hair.\nShe then smiles for the camera. Next, she turns to the left and puts her left hand on her head to hold down her hair as she continues to shave with the razor in her right hand.", "events": [ { "event": "The video begins with the camera focusing on a woman whose hair on the right side has been shaved. She stands in a room with a closed door and towels hanging on the wall in the background. She wears in a black halter top and a bracelet on her hand. 
She holds an electric razor in her right hand and puts her left hand on her head.", "visual_elements": [ { "content": "The camera focuses on a woman.", "type": "camera", "weight": 3 }, { "content": "The woman stands in a room.", "type": "attribute", "weight": 2 }, { "content": "The woman's hair on the right side has been shaved.", "type": "attribute", "weight": 3 }, { "content": "A closed door and towels hanging on the wall are in the background.", "type": "scene", "weight": 1 }, { "content": "She holds an electric razor in her right hand and rests her left hand on her head.", "type": "attribute", "weight": 2 }, { "content": "The woman holds an electric razor in her right hand.", "type": "attribute", "weight": 3 }, { "content": "The woman puts her left hand on her head.", "type": "action", "weight": 3 } ] }, { "event": "Next, the woman begins to shave her head with the razor. The razor moves on the right side of her scalp. The woman turns to the right as she shaves her hair.", "visual_elements": [ { "content": "The woman begins to shave her head with the razor.", "type": "action", "weight": 3 }, { "content": "The razor moves on the right side of her scalp.", "type": "action", "weight": 2 }, { "content": "The woman turns to the right as she shaves her hair.", "type": "action", "weight": 3 } ] }, { "event": "She then smiles for the camera. Next, she turns to the left and puts her left hand on her head to hold down her hair as she continues to shave with the razor in her right hand.", "visual_elements": [ { "content": "The woman smiles for the camera.", "type": "action", "weight": 3 }, { "content": "The woman turns to the left.", "type": "action", "weight": 3 }, { "content": "The woman puts her left hand on her head to hold down her hair.", "type": "action", "weight": 3 }, { "content": "The woman continues to shave with the razor in her right hand.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0076", "video_path": "DREAM-1K/324.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 6.38, "resolution": { "width": 1920, "height": 960 }, "caption": "The camera focuses on a bathroom. There are two mirrors in the bathroom. A woman stands in front of the mirror on the left side of the screen. The woman wears a white silk robe and her long hair is tied behind her head. The wall lamp above the mirror gives off a soft light that illuminates the whole scene.\nThe woman puts her hands on the sink and looks at herself in the mirror. The woman raises her hands to put them behind her head. The woman holds a comb in her right hand. The woman loosens her hair with both hands. The woman brushes her hair while looking at herself in the mirror.", "events": [ { "event": "The camera focuses on a bathroom. There are two mirrors in the bathroom. A woman stands in front of the mirror on the left side of the screen. The woman wears a white silk robe and her long hair is tied behind her head. 
The wall lamp above the mirror gives off a soft light that illuminates the whole scene.", "visual_elements": [ { "content": "The camera focuses on a bathroom.", "type": "camera", "weight": 3 }, { "content": "There are two mirrors in the bathroom.", "type": "attribute", "weight": 2 }, { "content": "A woman stands in front of the mirror on the left side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The woman wears a white silk robe and her long hair is tied behind her head.", "type": "attribute", "weight": 3 }, { "content": "The wall lamp above the mirror gives off a soft light that illuminates the whole scene.", "type": "scene", "weight": 2 } ] }, { "event": "The woman puts her hands on the sink and looks at herself in the mirror. The woman raises her hands to put them behind her head. The woman holds a comb in her right hand. The woman loosens her hair with both hands. The woman brushes her hair while looking at herself in the mirror.", "visual_elements": [ { "content": "The woman puts her hands on the sink.", "type": "attribute", "weight": 2 }, { "content": "The woman looks at herself in the mirror.", "type": "attribute", "weight": 2 }, { "content": "The woman raises her hands to put them behind her head.", "type": "action", "weight": 3 }, { "content": "The woman holds a comb in her right hand.", "type": "action", "weight": 3 }, { "content": "The woman loosens her hair with both hands.", "type": "action", "weight": 3 }, { "content": "The woman brushes her hair while looking at herself in the mirror.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0077", "video_path": "DREAM-1K/325.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 4.5, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins by showing the entrance to a summer camp called \"CAMP NIGHTWING\". The camera looks down from a high place to show a wide camp with many wooden buildings and trees in the background. As the camera moves upward, it reveals many children doing activities in the camp. Two girls at the entrance walk towards the centre of the camp with their backs to the camera. The girl on the left wears blue shorts and a patterned blouse while the girl on the right wears brown shorts and a striped vest.\nNext, the camera switches to a pier by the lake. A boy in green shorts, with a bare upper body, runs from the dock towards the lake. There is a couple sitting and kissing on the pier. The man wears a white tank top and red shorts, while the woman is in a blue swimsuit. A girl in a striped swimsuit with a green and white towel in her right hand bends down as she passes the couple to interrupt their kiss. The boy runs to the end of the pier and jumps into the lake with a splash.\nThe camera switches again to show a notice board by the lake which reads \"CAMP NIGHTWING LAKE RULES\" and lists various rules for the lake area. In the background, some people are swimming in the lake and the person sitting on the lifeguard chair is observing the lake.\nFinally, the camera switches to an archery field. A girl, wearing a white and purple T-shirt and blue jeans, is drawing a bow and preparing to shoot an arrow. Next to her is a boy wearing a blue striped vest and brown shorts. The boy is holding an arrow in his hand, ready to shoot. 
Several targets can be seen in the background, surrounded by green grass and trees, with sunlight spilling on the ground through the leaves.", "events": [ { "event": "The video begins by showing the entrance to a summer camp called \"CAMP NIGHTWING\". The camera looks down from a high place to show a wide camp with many wooden buildings and trees in the background. As the camera moves upward, it reveals many children doing activities in the camp. Two girls at the entrance walk towards the centre of the camp with their backs to the camera. The girl on the left wears blue shorts and a patterned blouse while the girl on the right wears brown shorts and a striped vest.", "visual_elements": [ { "content": "The camera shows the entrance to a summer camp called \"CAMP NIGHTWING\".", "type": "scene", "weight": 2 }, { "content": "The camera looks down from a high place to show a wide camp.", "type": "camera", "weight": 3 }, { "content": "Many wooden buildings and trees are in the background.", "type": "scene", "weight": 2 }, { "content": "The camera moves upward.", "type": "camera", "weight": 3 }, { "content": "Many children are doing activities in the camp.", "type": "scene", "weight": 3 }, { "content": "Two girls at the entrance walk towards the centre of the camp with their backs to the camera.", "type": "action", "weight": 3 }, { "content": "The girl on the left wears blue shorts and a patterned blouse.", "type": "attribute", "weight": 2 }, { "content": "The girl on the right wears brown shorts and a striped vest.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera switches to a pier by the lake. A boy in green shorts, with a bare upper body, runs from the dock towards the lake. There is a couple sitting and kissing on the pier. The man wears a white tank top and red shorts, while the woman is in a blue swimsuit. A girl in a striped swimsuit with a green and white towel in her right hand bends down as she passes the couple to interrupt their kiss. The boy runs to the end of the pier and jumps into the lake with a splash.", "visual_elements": [ { "content": "The camera switches to a pier by the lake.", "type": "camera", "weight": 3 }, { "content": "A boy is in green shorts, with a bare upper body.", "type": "attribute", "weight": 3 }, { "content": "The boy with the bare upper body runs from the dock towards the lake.", "type": "action", "weight": 2 }, { "content": "There is a couple sitting and kissing on the pier.", "type": "action", "weight": 1 }, { "content": "The man wears a white tank top and red shorts.", "type": "attribute", "weight": 1 }, { "content": "The woman is in a blue swimsuit.", "type": "attribute", "weight": 1 }, { "content": "A girl is in a striped swimsuit with a green and white towel in her right hand.", "type": "attribute", "weight": 1 }, { "content": "The girl with the towel bends down as she passes the couple to interrupt their kiss.", "type": "action", "weight": 1 }, { "content": "The boy with the bare upper body runs to the end of the pier.", "type": "action", "weight": 1 }, { "content": "The boy with the bare upper body jumps into the lake with a splash.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches again to show a notice board by the lake which reads \"CAMP NIGHTWING LAKE RULES\" and lists various rules for the lake area. 
In the background, some people are swimming in the lake and the person sitting on the lifeguard chair is observing the lake.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "This time the camera shows a notice board by the lake which reads \"CAMP NIGHTWING LAKE RULES\".", "type": "attribute", "weight": 2 }, { "content": "The notice board lists various rules for the lake area.", "type": "attribute", "weight": 1 }, { "content": "In the background, some people are swimming in the lake and the person sitting on the lifeguard chair is observing the lake.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera switches to an archery field. A girl, wearing a white and purple T-shirt and blue jeans, is drawing a bow and preparing to shoot an arrow. Next to her is a boy wearing a blue striped vest and brown shorts. The boy is holding an arrow in his hand, ready to shoot. Several targets can be seen in the background, surrounded by green grass and trees, with sunlight spilling on the ground through the leaves.", "visual_elements": [ { "content": "The camera switches to an archery field.", "type": "camera", "weight": 3 }, { "content": "A girl wears a white and purple T-shirt and blue jeans.", "type": "attribute", "weight": 2 }, { "content": "The girl in blue jeans is drawing a bow and preparing to shoot an arrow.", "type": "action", "weight": 3 }, { "content": "A boy wears a blue striped vest and brown shorts.", "type": "attribute", "weight": 1 }, { "content": "The boy in the blue striped vest is holding an arrow in his hand, ready to shoot.", "type": "attribute", "weight": 2 }, { "content": "Several targets can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "The targets are surrounded by green grass and trees.", "type": "scene", "weight": 1 }, { "content": "The sunlight spills on the ground through the leaves.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 30 }, { "index": "TUNA_0078", "video_path": "DREAM-1K/326.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 4.5, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with two figures in the frame. The person on the left side of the frame lies on the ground, wearing a brown short-sleeved top and black shorts. The woman on the right sits on the ground, dressed in a white tank top and shorts. The background is a dim cave with red earth covering the floor and some faint rock structures on the walls. There is a faint red light source on the woman's right side.\nThe camera moves slightly down to the right. The woman adjusts her sitting posture, and then stretches out her right hand to take the flashlight placed near her right leg. The light from the flashlight illuminates her legs and the ground around her. She shines the flashlight at the ground and leans her body to the left to pick up a book on the ground with her left hand. She then holds the flashlight in her left hand and begins to flip through the book with her right hand.", "events": [ { "event": "The video begins with two figures in the frame. The person on the left side of the frame lies on the ground, wearing a brown short-sleeved top and black shorts. The woman on the right sits on the ground, dressed in a white tank top and shorts. The background is a dim cave with red earth covering the floor and some faint rock structures on the walls. 
There is a faint red light source on the woman's right side.", "visual_elements": [ { "content": "Two figures are in the frame.", "type": "scene", "weight": 3 }, { "content": "The person on the left side of the frame lies on the ground.", "type": "scene", "weight": 2 }, { "content": "The person lying on the ground wears a brown short-sleeved top and black shorts.", "type": "attribute", "weight": 1 }, { "content": "The woman on the right sits on the ground.", "type": "scene", "weight": 2 }, { "content": "The woman is dressed in a white tank top and shorts.", "type": "attribute", "weight": 2 }, { "content": "The background is a dim cave.", "type": "scene", "weight": 2 }, { "content": "Some red earth covers the floor.", "type": "scene", "weight": 1 }, { "content": "Some faint rock structures are on the cave walls.", "type": "scene", "weight": 1 }, { "content": "There is a faint red light source on the woman's right side.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves slightly down to the right. The woman adjusts her sitting posture, and then stretches out her right hand to take the flashlight placed near her right leg. The light from the flashlight illuminates her legs and the ground around her. She shines the flashlight at the ground and leans her body to the left to pick up a book on the ground with her left hand. She then holds the flashlight in her left hand and begins to flip through the book with her right hand.", "visual_elements": [ { "content": "The camera moves slightly down to the right.", "type": "camera", "weight": 3 }, { "content": "The woman adjusts her sitting posture.", "type": "action", "weight": 1 }, { "content": "The woman stretches out her right hand to take the flashlight placed near her right leg.", "type": "action", "weight": 3 }, { "content": "The light from the flashlight illuminates her legs and the ground around her.", "type": "scene", "weight": 2 }, { "content": "The woman shines the flashlight at the ground.", "type": "action", "weight": 3 }, { "content": "The woman leans her body to the left.", "type": "action", "weight": 3 }, { "content": "The woman picks up a book on the ground with her left hand.", "type": "action", "weight": 3 }, { "content": "She takes the book to her side.", "type": "action", "weight": 2 }, { "content": "The woman holds the flashlight in her left hand and begins to flip through the book with her right hand.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 18 }, { "index": "TUNA_0079", "video_path": "DREAM-1K/327.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 8.0, "resolution": { "width": 1920, "height": 1024 }, "caption": "The video begins with a woman and two children in the frame. The woman wears a grey-blue short-sleeved top and sits on the bed with her legs crossed and a book in her hand. In the middle of the bed lies a little girl who wears light-coloured pajamas and holds a stuffed toy. On the right side of the frame is an older black little girl wearing blue patterned pajamas with her arms crossed over her chest. She turns her head to look at the right side. In the background, a small yellow lamp can be seen on the bedside table next to some books and toys.\nNext, the woman closes the book, takes out a mobile phone and sits up. The two children also sit up and get closer to the woman. The woman holds the mobile phone up in front of the two children. 
The two children adjust their positions to look at the phone.", "events": [ { "event": "The video begins with a woman and two children in the frame. The woman wears a grey-blue short-sleeved top and sits on the bed with her legs crossed and a book in her hand. In the middle of the bed lies a little girl who wears light-coloured pajamas and holds a stuffed toy. On the right side of the frame is an older black little girl wearing blue patterned pajamas with her arms crossed over her chest. She turns her head to look at the right side. In the background, a small yellow lamp can be seen on the bedside table next to some books and toys.", "visual_elements": [ { "content": "A woman and two children are in the frame.", "type": "scene", "weight": 3 }, { "content": "The woman wears a grey-blue short-sleeved top.", "type": "attribute", "weight": 2 }, { "content": "The woman sits on the bed with her legs crossed.", "type": "attribute", "weight": 1 }, { "content": "The woman holds a book in her hand.", "type": "attribute", "weight": 2 }, { "content": "In the middle of the bed lies a little girl.", "type": "attribute", "weight": 3 }, { "content": "The little girl wears light-coloured pajamas.", "type": "attribute", "weight": 1 }, { "content": "The little girl holds a stuffed toy.", "type": "attribute", "weight": 3 }, { "content": "On the right side of the frame is an older black little girl.", "type": "attribute", "weight": 3 }, { "content": "The black little girl wears blue patterned pajamas.", "type": "attribute", "weight": 1 }, { "content": "The black little girl crosses her arms over her chest.", "type": "attribute", "weight": 2 }, { "content": "The black little girl turns her head to look at the right side.", "type": "attribute", "weight": 2 }, { "content": "In the background, a small yellow lamp can be seen on the bedside table next to some books and toys.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the woman closes the book, takes out a mobile phone and sits up. The two children also sit up and get closer to the woman. The woman holds the mobile phone up in front of the two children. The two children adjust their positions to look at the phone.", "visual_elements": [ { "content": "The woman closes the book.", "type": "action", "weight": 2 }, { "content": "The woman takes out a mobile phone.", "type": "action", "weight": 3 }, { "content": "The woman sits up.", "type": "action", "weight": 3 }, { "content": "The two children also sit up.", "type": "action", "weight": 3 }, { "content": "The two children get closer to the woman.", "type": "action", "weight": 3 }, { "content": "The woman holds the mobile phone up in front of the two children.", "type": "action", "weight": 3 }, { "content": "The two children adjust their positions to look at the phone.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 19 }, { "index": "TUNA_0080", "video_path": "DREAM-1K/328.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 5.38, "resolution": { "width": 1920, "height": 800 }, "caption": "The camera focuses on the inside of a car with a young man in the back seat wearing a light grey long-sleeved top. The driver in front of the man is a middle-aged man wearing headphones. The young man leans forward slightly and moves to his right. The camera moves to the left.\nThe camera switches to the outside of the car. The yellow taxi has a lot of numbers written on its body. 
In the background are some light-coloured residential buildings. The young man opens the car door and then leans his body outwards to get out of the car quickly. The young man shuts the door with his left hand and begins to run forward. Following the man's movement, the camera moves to the right.\nThe camera switches again to show an electronic display in the street. Below the display are several workers wearing reflective vests and safety helmets. As the camera zooms in, the words “COME HOME DIPSHIT” are shown in red LED lights on the display.", "events": [ { "event": "The camera focuses on the inside of a car with a young man in the back seat wearing a light grey long-sleeved top. The driver in front of the man is a middle-aged man wearing headphones. The young man leans forward slightly and moves to his right. The camera moves to the left.", "visual_elements": [ { "content": "The camera focuses on the inside of a car.", "type": "camera", "weight": 3 }, { "content": "A young man in the back seat wears a light grey long-sleeved top.", "type": "attribute", "weight": 3 }, { "content": "The driver in front of the man is a middle-aged man wearing headphones.", "type": "attribute", "weight": 2 }, { "content": "The young man leans forward slightly.", "type": "attribute", "weight": 3 }, { "content": "The young man moves to his right.", "type": "action", "weight": 3 }, { "content": "The camera moves to the left.", "type": "camera", "weight": 3 } ] }, { "event": "The camera switches to the outside of the car. The yellow taxi has a lot of numbers written on its body. In the background are some light-coloured residential buildings. The young man opens the car door and then leans his body outwards to get out of the car quickly. The young man shuts the door with his left hand and begins to run forward. Following the man's movement, the camera moves to the right.", "visual_elements": [ { "content": "The camera switches to the outside of the car.", "type": "camera", "weight": 3 }, { "content": "The yellow taxi has a lot of numbers written on its body.", "type": "scene", "weight": 3 }, { "content": "In the background are some light-coloured residential buildings.", "type": "scene", "weight": 1 }, { "content": "The young man opens the car door.", "type": "action", "weight": 3 }, { "content": "The young man leans his body outwards.", "type": "action", "weight": 2 }, { "content": "The young man gets out of the car quickly.", "type": "action", "weight": 3 }, { "content": "The young man shuts the door with his left hand.", "type": "action", "weight": 3 }, { "content": "The young man begins to run forward.", "type": "action", "weight": 3 }, { "content": "Following the man's movement, the camera moves to the right.", "type": "camera", "weight": 3 } ] }, { "event": "The camera switches again to show an electronic display in the street. Below the display are several workers wearing reflective vests and safety helmets. 
As the camera zooms in, the words “COME HOME DIPSHIT” are shown in red LED lights on the display.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "The camera shows an electronic display in the street.", "type": "scene", "weight": 3 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The words “COME HOME DIPSHIT” are shown in red LED lights on the display.", "type": "attribute", "weight": 3 }, { "content": "Below the display are several workers wearing reflective vests and safety helmets.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0081", "video_path": "DREAM-1K/329.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 6.38, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with the camera focusing on a group of young people who are attending a lively party. Many young men and women are dancing in the frame. A woman in a pink blouse is in the crowd. She stands in the middle of the frame and raises her arms in the air as she dances to the music. Next to her is a man in a colourful shirt who is also waving his arms to the music. Below the centre of the screen, a short-haired woman in a purple top dances among the crowd with her tongue out.\nNext, the camera switches to a close-up focusing on the first man and woman. The woman in the pink top puts her arms around the man's neck. The two then kiss. They are surrounded by many other young people who are jumping around with their hands up.", "events": [ { "event": "The video begins with the camera focusing on a group of young people who are attending a lively party. Many young men and women are dancing in the frame. A woman in a pink blouse is in the crowd. She stands in the middle of the frame and raises her arms in the air as she dances to the music. Next to her is a man in a colourful shirt who is also waving his arms to the music. Below the centre of the screen, a short-haired woman in a purple top dances among the crowd with her tongue out.", "visual_elements": [ { "content": "The camera focuses on a group of young people.", "type": "camera", "weight": 3 }, { "content": "They are attending a lively party.", "type": "scene", "weight": 3 }, { "content": "Many young men and women are dancing in the frame.", "type": "scene", "weight": 3 }, { "content": "A woman in a pink blouse is in the crowd.", "type": "attribute", "weight": 2 }, { "content": "She stands in the middle of the frame.", "type": "attribute", "weight": 2 }, { "content": "The woman raises her arms in the air.", "type": "attribute", "weight": 2 }, { "content": "The woman dances to the music.", "type": "action", "weight": 2 }, { "content": "Next to her is a man in a colourful shirt.", "type": "attribute", "weight": 2 }, { "content": "The man is also waving his arms to the music.", "type": "action", "weight": 2 }, { "content": "A short-haired woman in a purple top is in front of the two people.", "type": "attribute", "weight": 1 }, { "content": "The short-haired woman dances among the crowd with her tongue out.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera switches to a close-up focusing on the first man and woman. The woman in the pink top puts her arms around the man's neck. The two then kiss. 
They are surrounded by many other young people who are jumping around with their hands up.", "visual_elements": [ { "content": "The camera switches to a close-up focusing on the first man and woman.", "type": "camera", "weight": 3 }, { "content": "The woman in the pink top puts her arms around the man's neck.", "type": "action", "weight": 3 }, { "content": "The two kiss.", "type": "action", "weight": 3 }, { "content": "They are surrounded by many other young people who are jumping around with their hands up.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0082", "video_path": "DREAM-1K/330.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 4.5, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with the camera focusing on a blue and grey Automatic Teller Machine (ATM). The ATM is spitting out a large amount of money, which is rapidly pouring out of the cash outlet. The camera sways and then moves backwards.\nNext, the camera switches to a man who stands outdoors with city streets and buildings in the background. The man wears a light-coloured, long-sleeved shirt and has short hair. He looks surprised. He turns his head left and looks down. A red and grey bus can be seen in the background, along with some pedestrians and buildings.\nThe camera switches back to the ATM. A sentence appears slowly on the ATM's screen which reads \"Honestly I thought you would have more money than this\".", "events": [ { "event": "The video begins with the camera focusing on a blue and grey Automatic Teller Machine (ATM). The ATM is spitting out a large amount of money, which is rapidly pouring out of the cash outlet. The camera sways and then moves backwards.", "visual_elements": [ { "content": "The camera focuses on a blue and grey Automatic Teller Machine (ATM).", "type": "camera", "weight": 3 }, { "content": "The ATM is spitting out a large amount of money.", "type": "action", "weight": 3 }, { "content": "A lot of money is rapidly pouring out of the cash outlet.", "type": "action", "weight": 3 }, { "content": "The camera sways and then moves backwards.", "type": "camera", "weight": 3 } ] }, { "event": "Next, the camera switches to a man who stands outdoors with city streets and buildings in the background. The man wears a light-coloured, long-sleeved shirt and has short hair. He looks surprised. He turns his head left and looks down. A red and grey bus can be seen in the background, along with some pedestrians and buildings.", "visual_elements": [ { "content": "The camera switches to a man.", "type": "camera", "weight": 3 }, { "content": "The man stands outdoors.", "type": "attribute", "weight": 2 }, { "content": "The man wears a light-coloured, long-sleeved shirt and has short hair.", "type": "attribute", "weight": 2 }, { "content": "He looks surprised.", "type": "attribute", "weight": 3 }, { "content": "He turns his head left.", "type": "action", "weight": 2 }, { "content": "The man looks down.", "type": "action", "weight": 2 }, { "content": "A red and grey bus can be seen in the background, along with some pedestrians and buildings.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches back to the ATM. 
A sentence appears slowly on the ATM's screen which reads \"Honestly I thought you would have more money than this\".", "visual_elements": [ { "content": "The camera switches back to the ATM.", "type": "camera", "weight": 3 }, { "content": "A sentence appears slowly on the ATM's screen.", "type": "attribute", "weight": 3 }, { "content": "The sentence reads \"Honestly I thought you would have more money than this\".", "type": "attribute", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0083", "video_path": "DREAM-1K/331.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 3.88, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with the camera filming from inside the car, showing a man entering the car from outside. He wears a brown jacket and holds a mobile phone in his right hand. Residential houses and parked vehicles can be seen in the background. The man then bends down to enter the car and sits down.\nThe camera switches to the man sitting in the driver's seat. The man places his mobile phone on the phone holder in the car with his right hand and then lowers his right hand. Finally, the man moves slightly to adjust his body.", "events": [ { "event": "The video begins with the camera filming from inside the car, showing a man entering the car from outside. He wears a brown jacket and holds a mobile phone in his right hand. Residential houses and parked vehicles can be seen in the background. The man then bends down to enter the car and sits down.", "visual_elements": [ { "content": "The video begins with the camera filming from inside the car.", "type": "camera", "weight": 3 }, { "content": "A man enters the car from outside.", "type": "action", "weight": 3 }, { "content": "The man wears a brown jacket.", "type": "attribute", "weight": 2 }, { "content": "The man holds a mobile phone in his right hand.", "type": "attribute", "weight": 2 }, { "content": "Residential houses and parked vehicles can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "The man bends down to enter the car and sits down.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the man sitting in the driver's seat. The man places his mobile phone on the phone holder in the car with his right hand and then lowers his right hand. Finally, the man moves slightly to adjust his body.", "visual_elements": [ { "content": "The camera switches to the man sitting in the driver's seat.", "type": "camera", "weight": 3 }, { "content": "The man places his mobile phone on the phone holder in the car with his right hand.", "type": "action", "weight": 3 }, { "content": "The man lowers his right hand.", "type": "action", "weight": 2 }, { "content": "The man moves slightly to adjust his body.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0084", "video_path": "DREAM-1K/332.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 4.88, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with the camera focusing on three men. In the background is a green fence with barbed wire. The man on the left wears a black and white striped referee uniform. The man standing in the middle wears a red sweatshirt. The younger man on the right wears a blue and grey sweatshirt. The man on the right looks to his right and sticks out his tongue. 
Then the younger man gives a thumbs up to his right. The two men behind the younger man bend over slightly.\nThe scene then switches to a more distant view of a man in a red sweatshirt standing on the ground in the middle of a lawn. The background is a meadow and buildings in the distance. Another woman in a red sweatshirt runs towards the camera in the distance. The man in the red sweatshirt is bent over and holds a red ball in his right hand. The man throws the ball forward and then his body leans forward as the red ball rolls across the grassland.\nThe scene switches and the man in the blue and grey sweatshirt runs forward a few steps. The man extends his right foot to kick the red ball out as the two men behind the man straighten up.\nThe scene switches to the man in the red sweatshirt. The man in the red sweatshirt runs a few steps forward and catches the ball with his hands up. The man in the red sweatshirt looks to his left.", "events": [ { "event": "The video begins with the camera focusing on three men. In the background is a green fence with barbed wire. The man on the left wears a black and white striped referee uniform. The man standing in the middle wears a red sweatshirt. The younger man on the right wears a blue and grey sweatshirt. The man on the right looks to his right and sticks out his tongue. Then the younger man gives a thumbs up to his right. The two men behind the younger man bend over slightly.", "visual_elements": [ { "content": "The camera focuses on three men.", "type": "camera", "weight": 3 }, { "content": "In the background is a green fence with barbed wire.", "type": "scene", "weight": 1 }, { "content": "The man on the left wears a black and white striped referee uniform.", "type": "attribute", "weight": 2 }, { "content": "The man standing in the middle wears a red sweatshirt.", "type": "attribute", "weight": 2 }, { "content": "The younger man on the right wears a blue and grey sweatshirt.", "type": "attribute", "weight": 2 }, { "content": "The younger man on the right looks to his right.", "type": "attribute", "weight": 3 }, { "content": "The young man sticks out his tongue to his right side and gives a thumbs up.", "type": "action", "weight": 3 }, { "content": "The two men behind the younger man bend over slightly.", "type": "attribute", "weight": 2 } ] }, { "event": "The scene then switches to a more distant view of a man in a red sweatshirt standing on the ground in the middle of a lawn. The background is a meadow and buildings in the distance. Another woman in a red sweatshirt runs towards the camera in the distance. The man in the red sweatshirt is bent over and holds a red ball in his right hand. 
The man throws the ball forward and then his body leans forward as the red ball rolls across the grassland.", "visual_elements": [ { "content": "The scene switches to a more distant view.", "type": "camera", "weight": 3 }, { "content": "A man in a red sweatshirt stands on the ground in the middle of a lawn.", "type": "attribute", "weight": 3 }, { "content": "The background is a meadow and buildings in the distance.", "type": "scene", "weight": 2 }, { "content": "Another woman in a red sweatshirt runs towards the camera in the distance.", "type": "action", "weight": 2 }, { "content": "The man in the red sweatshirt is bent over and holds a red ball in his right hand.", "type": "attribute", "weight": 3 }, { "content": "The man throws the ball forward.", "type": "action", "weight": 3 }, { "content": "The man's body leans forward.", "type": "action", "weight": 2 }, { "content": "The red ball rolls across the grassland.", "type": "action", "weight": 3 } ] }, { "event": "The scene switches and the man in the blue and grey sweatshirt runs forward a few steps. The man extends his right foot to kick the red ball out as the two men behind the man straighten up.", "visual_elements": [ { "content": "The scene switches.", "type": "camera", "weight": 3 }, { "content": "The man in the blue and grey sweatshirt runs forward a few steps.", "type": "action", "weight": 3 }, { "content": "The man extends his right foot to kick the red ball out.", "type": "action", "weight": 3 }, { "content": "The two men behind the man straighten up.", "type": "action", "weight": 2 } ] }, { "event": "The scene switches to the man in the red sweatshirt. The man in the red sweatshirt runs a few steps forward and catches the ball with his hands up. The man in the red sweatshirt looks to his left.", "visual_elements": [ { "content": "The scene switches to the man in the red sweatshirt.", "type": "camera", "weight": 3 }, { "content": "The man in the red sweatshirt runs a few steps forward.", "type": "action", "weight": 3 }, { "content": "The man in the red sweatshirt catches the ball with his hands up.", "type": "action", "weight": 3 }, { "content": "The man in the red sweatshirt looks to his left.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 24 }, { "index": "TUNA_0085", "video_path": "DREAM-1K/333.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 7.62, "resolution": { "width": 1920, "height": 784 }, "caption": "The video begins with the camera shooting from inside the car to the outside, where a man can be seen standing in front of the car through the windshield. He wears a plaid shirt with a white vest inside and holds a wooden stick in his left hand. The man in the driver's seat turns the windshield wiper handle up with his right hand. In the background is a street with trees and houses on both sides and several cars parked in the distance.\nThe camera switches to the inside of the car. The man in the driver's seat holds the steering wheel in his hands and turns it slightly. A woman sits in the passenger seat and looks straight ahead.\nThe camera switches again to the outside of the car to film the man standing in front of it. The man outside the car remains standing, holding a wooden stick in his hand. He tilts his head to the left and looks inside the car with a smile.\nThe camera returns to the inside of the car. The woman glances at the man in the driver's seat and then opens the car door. 
The video stops as the woman is about to get out of the car.", "events": [ { "event": "The video begins with the camera shooting from inside the car to the outside, where a man can be seen standing in front of the car through the windshield. He wears a plaid shirt with a white vest inside and holds a wooden stick in his left hand. The man in the driver's seat turns the windshield wiper handle up with his right hand. In the background is a street with trees and houses on both sides and several cars parked in the distance.", "visual_elements": [ { "content": "The camera shoots from inside the car to the outside.", "type": "camera", "weight": 3 }, { "content": "A man stands in front of the car.", "type": "attribute", "weight": 3 }, { "content": "The man outside the car wears a plaid shirt with a white vest inside.", "type": "attribute", "weight": 2 }, { "content": "The man holds a wooden stick in his left hand.", "type": "attribute", "weight": 3 }, { "content": "The man in the driver's seat turns the windshield wiper handle up with his right hand.", "type": "action", "weight": 2 }, { "content": "In the background is a street with trees and houses on both sides and several cars parked in the distance.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches to the inside of the car. The man in the driver's seat holds the steering wheel in his hands and turns it slightly. A woman sits in the passenger seat and looks straight ahead.", "visual_elements": [ { "content": "The camera switches to the inside of the car.", "type": "camera", "weight": 3 }, { "content": "The man in the driver's seat holds the steering wheel in his hands and turns it slightly.", "type": "action", "weight": 2 }, { "content": "A woman sits in the passenger seat.", "type": "attribute", "weight": 3 }, { "content": "The woman looks straight ahead.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches again to the outside of the car to film the man standing in front of it. The man outside the car remains standing, holding a wooden stick in his hand. He tilts his head to the left and looks inside the car with a smile.", "visual_elements": [ { "content": "The camera switches again to the outside of the car.", "type": "camera", "weight": 3 }, { "content": "The man outside the car remains standing.", "type": "attribute", "weight": 2 }, { "content": "The man outside the car holds a wooden stick in his hand.", "type": "attribute", "weight": 2 }, { "content": "The man outside the car tilts his head to the left and looks inside the car.", "type": "action", "weight": 3 }, { "content": "The man outside the car shows a smile.", "type": "action", "weight": 3 } ] }, { "event": "The camera returns to the inside of the car. The woman glances at the man in the driver's seat and then opens the car door. 
The video stops as the woman is about to get out of the car.", "visual_elements": [ { "content": "The camera returns to the inside of the car.", "type": "camera", "weight": 3 }, { "content": "The woman glances at the man in the driver's seat.", "type": "action", "weight": 2 }, { "content": "The woman opens the car door.", "type": "action", "weight": 3 }, { "content": "The video stops as the woman is about to get out of the car.", "type": "camera", "weight": 3 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0086", "video_path": "DREAM-1K/334.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 10.5, "resolution": { "width": 1920, "height": 784 }, "caption": "The video begins with the camera focusing on a vintage brown car, which is parked at the side of a street. Several red brick buildings can be seen in the background and there are some shop signs on the buildings. A few vintage cars are travelling along the street and pedestrians are walking along the street.\nA man and a woman are behind and to the right of a brown car. The man wears a black coat with a white shirt underneath and holds a brown paper bag. The woman wears a dark coat and a red plaid shirt.\nThe camera switches to focus on the man's hands. The man inserts the key with his right hand and turns the key before pulling it out. The man then pulls the car door open with his left hand.\nThe camera switches to an inside view of the car. The man bends down to enter the car and then places the paper bag under the seat with his left hand. The inside of the car is dim, but details of the inside of the doors can be seen, including door handles and window cranks. He pauses for a moment and then frowns. He turns sideways to look under the seat.", "events": [ { "event": "The video begins with the camera focusing on a vintage brown car, which is parked at the side of a street. Several red brick buildings can be seen in the background and there are some shop signs on the buildings. A few vintage cars are travelling along the street and pedestrians are walking along the street.", "visual_elements": [ { "content": "The video begins with the camera focusing on a vintage brown car.", "type": "camera", "weight": 3 }, { "content": "The car is parked at the side of a street.", "type": "scene", "weight": 1 }, { "content": "Several red brick buildings can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "There are some shop signs on the buildings.", "type": "scene", "weight": 1 }, { "content": "A few vintage cars are travelling along the street.", "type": "action", "weight": 1 }, { "content": "Pedestrians are walking along the street.", "type": "action", "weight": 1 } ] }, { "event": "A man and a woman are behind and to the right of a brown car. The man wears a black coat with a white shirt underneath and holds a brown paper bag. The woman wears a dark coat and a red plaid shirt.", "visual_elements": [ { "content": "A man and a woman are behind and to the right of a brown car.", "type": "scene", "weight": 3 }, { "content": "The two walk towards the car.", "type": "action", "weight": 3 }, { "content": "The man wears a black coat with a white shirt underneath and holds a brown paper bag.", "type": "attribute", "weight": 2 }, { "content": "The woman wears a dark coat and a red plaid shirt.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to focus on the man's hands. 
The man inserts the key with his right hand and turns the key before pulling it out. The man then pulls the car door open with his left hand.", "visual_elements": [ { "content": "The camera switches to focus on the man's hands.", "type": "camera", "weight": 3 }, { "content": "The man inserts the key with his right hand.", "type": "action", "weight": 3 }, { "content": "The man turns the key before pulling it out.", "type": "action", "weight": 3 }, { "content": "The man then pulls the car door open with his left hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to an inside view of the car. The man bends down to enter the car and then places the paper bag under the seat with his left hand. The inside of the car is dim, but details of the inside of the doors can be seen, including door handles and window cranks. He pauses for a moment and then frowns. He turns sideways to look under the seat.", "visual_elements": [ { "content": "The camera switches to an inside view of the car.", "type": "camera", "weight": 3 }, { "content": "The man bends down to enter the car.", "type": "action", "weight": 3 }, { "content": "The man places the paper bag under the seat with his left hand.", "type": "action", "weight": 2 }, { "content": "The inside of the car is dim, but details of the inside of the doors can be seen, including door handles and window cranks.", "type": "scene", "weight": 1 }, { "content": "The man pauses for a moment.", "type": "action", "weight": 3 }, { "content": "The man frowns.", "type": "action", "weight": 2 }, { "content": "The man turns sideways to look under the seat.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0087", "video_path": "DREAM-1K/335.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 11.38, "resolution": { "width": 1920, "height": 784 }, "caption": "The video begins with the camera focusing on a dim room where two table lamps give off soft light. There is a mirror in the room which reflects two people on a bed. The two people are a man and a woman. The man wears a plaid shirt and lies on the bed with his arms crossed over his chest and his eyes closed. The woman wears a yellow shirt and leans against the bed with her hands on her stomach and her eyes closed.\nThe camera switches to a closer angle, showing a close-up of the two people's upper bodies. The man wears a watch on his left hand and the woman also wears a watch on her left hand. The man then opens his eyes and turns his head to the left to look at the woman. The man then sits up slightly to look at the alarm clock to his front left.\nThe camera switches again to focus on an old clock in the room this time. The clock sits on a wooden cabinet and shows that it is night.\nAt the end of the video, the camera returns to the close-up of the two people. The man lies down again after watching the clock.", "events": [ { "event": "The video begins with the camera focusing on a dim room where two table lamps give off soft light. There is a mirror in the room which reflects two people on a bed. The two people are a man and a woman. The man wears a plaid shirt and lies on the bed with his arms crossed over his chest and his eyes closed. 
The woman wears a yellow shirt and leans against the bed with her hands on her stomach and her eyes closed.", "visual_elements": [ { "content": "The camera focuses on a dim room.", "type": "camera", "weight": 3 }, { "content": "Two table lamps give off soft light.", "type": "scene", "weight": 2 }, { "content": "There is a mirror in the room.", "type": "scene", "weight": 2 }, { "content": "The mirror reflects two people on a bed.", "type": "scene", "weight": 2 }, { "content": "The two people are a man and a woman.", "type": "attribute", "weight": 3 }, { "content": "The man wears a plaid shirt.", "type": "attribute", "weight": 2 }, { "content": "The man lies on the bed.", "type": "attribute", "weight": 2 }, { "content": "The man crosses his arms over his chest.", "type": "attribute", "weight": 2 }, { "content": "The man closes his eyes.", "type": "attribute", "weight": 2 }, { "content": "The woman wears a yellow shirt.", "type": "attribute", "weight": 2 }, { "content": "The woman leans against the bed.", "type": "attribute", "weight": 2 }, { "content": "The woman puts her hands on her stomach.", "type": "attribute", "weight": 2 }, { "content": "The woman closes her eyes.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to a closer angle, showing a close-up of the two people's upper bodies. The man wears a watch on his left hand and the woman also wears a watch on her left hand. The man then opens his eyes and turns his head to the left to look at the woman. The man then sits up slightly to look at the alarm clock to his front left.", "visual_elements": [ { "content": "The camera switches to a closer angle, showing a close-up of the two people's upper bodies.", "type": "camera", "weight": 3 }, { "content": "The man wears a watch on his left hand.", "type": "attribute", "weight": 1 }, { "content": "The woman also wears a watch on her left hand.", "type": "attribute", "weight": 1 }, { "content": "The man opens his eyes.", "type": "action", "weight": 3 }, { "content": "The man turns his head to the left.", "type": "action", "weight": 3 }, { "content": "The man looks at the woman.", "type": "action", "weight": 3 }, { "content": "The man sits up slightly to look at the alarm clock to his front left.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches again to focus on an old clock in the room this time. The clock sits on a wooden cabinet and shows that it is night.", "visual_elements": [ { "content": "The camera switches again to focus on an old clock in the room this time.", "type": "camera", "weight": 3 }, { "content": "The clock sits on a wooden cabinet.", "type": "scene", "weight": 1 }, { "content": "The clock shows that it is night.", "type": "scene", "weight": 1 } ] }, { "event": "At the end of the video, the camera returns to the close-up of the two people. The man lies down again after watching the clock.", "visual_elements": [ { "content": "The camera returns to the close-up of the two people.", "type": "camera", "weight": 3 }, { "content": "The man lies down again after watching the clock.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0088", "video_path": "DREAM-1K/336.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 7.75, "resolution": { "width": 1920, "height": 784 }, "caption": "The video begins with the camera focusing on the inside of a car. 
A woman in an orange plaid shirt sits in the passenger seat with a green cake pan on her legs, which holds a round cake covered in white icing. The woman holds the lid of the cake pan in her right hand. The woman then leans over to place the lid under her right side.\nThe camera then switches to a man in the driver's seat. He wears a white striped shirt and turns the steering wheel back and forth slightly with his left hand. The man tilts his head to look down to the right. Bright sunlight and a distant landscape can be seen outside the car window.\nThe camera switches back again to the woman in the passenger seat. The woman holds the bottom of the cake pan with her left hand and cuts the cake with a small knife in her right hand. The woman then cuts off a small piece of cake. Finally, the woman releases the cake pan in her left hand and moves to the left with the knife in her right hand.", "events": [ { "event": "The video begins with the camera focusing on the inside of a car. A woman in an orange plaid shirt sits in the passenger seat with a green cake pan on her legs, which holds a round cake covered in white icing. The woman holds the lid of the cake pan in her right hand. The woman then leans over to place the lid under her right side.", "visual_elements": [ { "content": "The camera focuses on the inside of a car.", "type": "camera", "weight": 3 }, { "content": "A woman in an orange plaid shirt sits in the passenger seat.", "type": "attribute", "weight": 2 }, { "content": "The woman puts a green cake pan on her legs.", "type": "attribute", "weight": 3 }, { "content": "The cake pan holds a round cake.", "type": "attribute", "weight": 2 }, { "content": "The cake is covered in white icing.", "type": "attribute", "weight": 1 }, { "content": "The woman holds the lid of the cake pan in her right hand.", "type": "attribute", "weight": 3 }, { "content": "The woman leans over to place the lid under her right side.", "type": "action", "weight": 3 } ] }, { "event": "The camera then switches to a man in the driver's seat. He wears a white striped shirt and turns the steering wheel back and forth slightly with his left hand. The man tilts his head to look down to the right. Bright sunlight and a distant landscape can be seen outside the car window.", "visual_elements": [ { "content": "The camera switches to a man in the driver's seat.", "type": "camera", "weight": 3 }, { "content": "The man wears a white striped shirt.", "type": "attribute", "weight": 2 }, { "content": "The man turns the steering wheel back and forth slightly with his left hand.", "type": "action", "weight": 3 }, { "content": "The man tilts his head to look down to the right.", "type": "action", "weight": 3 }, { "content": "Bright sunlight and a distant landscape can be seen outside the car window.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches back again to the woman in the passenger seat. The woman holds the bottom of the cake pan with her left hand and cuts the cake with a small knife in her right hand. The woman then cuts off a small piece of cake. 
Finally, the woman releases the cake pan in her left hand and moves to the left with the knife in her right hand.", "visual_elements": [ { "content": "The camera switches back again to the woman in the passenger seat.", "type": "camera", "weight": 3 }, { "content": "The woman holds the bottom of the cake pan with her left hand.", "type": "action", "weight": 2 }, { "content": "The woman cuts the cake with a small knife in her right hand.", "type": "action", "weight": 3 }, { "content": "The woman cuts off a small piece of cake.", "type": "action", "weight": 3 }, { "content": "The woman releases the cake pan in her left hand.", "type": "action", "weight": 2 }, { "content": "The woman moves to the left with the knife in her right hand.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0089", "video_path": "DREAM-1K/337.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 2.38, "resolution": { "width": 1920, "height": 784 }, "caption": "The video begins with the camera focusing on a man in a light-coloured shirt standing in a room with wooden walls. The background is wooden walls and the lighting is dim. The man looks down with a serious facial expression.\nIn front of the man stands a woman with curly hair and in light-coloured clothes. The woman then raises her right hand and slaps the man. The man's body leans to the left of the frame as a result of the force. The man's body then leans back.", "events": [ { "event": "The video begins with the camera focusing on a man in a light-coloured shirt standing in a room with wooden walls. The background is wooden walls and the lighting is dim. The man looks down with a serious facial expression.", "visual_elements": [ { "content": "The camera focuses on a man in a light-coloured shirt.", "type": "camera", "weight": 3 }, { "content": "The man stands in a room with wooden walls.", "type": "attribute", "weight": 2 }, { "content": "The background is wooden walls and the lighting is dim.", "type": "scene", "weight": 1 }, { "content": "The man has a serious facial expression.", "type": "attribute", "weight": 2 }, { "content": "The man looks down.", "type": "action", "weight": 3 } ] }, { "event": "In front of the man stands a woman with curly hair and in light-coloured clothes. The woman then raises her right hand and slaps the man. The man's body leans to the left of the frame as a result of the force. The man's body then leans back.", "visual_elements": [ { "content": "In front of the man stands a woman with curly hair.", "type": "attribute", "weight": 2 }, { "content": "The woman is in light-coloured clothes.", "type": "attribute", "weight": 1 }, { "content": "The woman raises her right hand and slaps the man.", "type": "action", "weight": 3 }, { "content": "The man's body leans to the left of the frame as a result of the force.", "type": "action", "weight": 3 }, { "content": "The man's body leans back.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0090", "video_path": "DREAM-1K/338.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 5.5, "resolution": { "width": 1712, "height": 1024 }, "caption": "The video begins with the camera focusing on a middle-aged man sitting in a car. He wears glasses and a dark coat. He sits in the driver's seat, with his body leaning to the right and his face turning back to the camera. 
The inside of the car is dim and a blurred background of buildings can be seen outside the window.\nNext, the man picks up a brown paper bag next to his seat, turns around and places the bag on his chest. The man lowers his head as he holds the bag in his left hand and reaches his right hand inside the bag to rummage around. At the same time, the man is chewing something in his mouth. The man then takes a piece of dessert out of the bag with his right hand. Finally, the man puts the dessert into his mouth. The man opens his mouth and takes a bite.", "events": [ { "event": "The video begins with the camera focusing on a middle-aged man sitting in a car. He wears glasses and a dark coat. He sits in the driver's seat, with his body leaning to the right and his face turning back to the camera. The inside of the car is dim and a blurred background of buildings can be seen outside the window.", "visual_elements": [ { "content": "The camera focuses on a middle-aged man sitting in a car.", "type": "camera", "weight": 3 }, { "content": "The man wears glasses and a dark coat.", "type": "attribute", "weight": 2 }, { "content": "The man sits in the driver's seat.", "type": "attribute", "weight": 2 }, { "content": "The man leans his body to the right and turns his face back to the camera.", "type": "attribute", "weight": 2 }, { "content": "The inside of the car is dim.", "type": "scene", "weight": 1 }, { "content": "A blurred background of buildings can be seen outside the window.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the man picks up a brown paper bag next to his seat, turns around and places the bag on his chest. The man lowers his head as he holds the bag in his left hand and reaches his right hand inside the bag to rummage around. At the same time, the man is chewing something in his mouth. The man then takes a piece of dessert out of the bag with his right hand. Finally, the man puts the dessert into his mouth. The man opens his mouth and takes a bite.", "visual_elements": [ { "content": "The man picks up a brown paper bag next to his seat.", "type": "action", "weight": 3 }, { "content": "The man turns around and places the bag on his chest.", "type": "action", "weight": 3 }, { "content": "The man lowers his head.", "type": "action", "weight": 3 }, { "content": "The man holds the bag in his left hand.", "type": "action", "weight": 3 }, { "content": "The man reaches his right hand inside the bag to rummage around.", "type": "action", "weight": 3 }, { "content": "The man is chewing something in his mouth.", "type": "action", "weight": 2 }, { "content": "The man takes a piece of dessert out of the bag with his right hand.", "type": "action", "weight": 3 }, { "content": "The man puts the dessert into his mouth.", "type": "action", "weight": 3 }, { "content": "The man opens his mouth and takes a bite.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0091", "video_path": "DREAM-1K/339.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 4.5, "resolution": { "width": 1712, "height": 1024 }, "caption": "The video begins with a woman in a light-coloured dress standing on the grass. She smiles and holds up a little girl with her hands. The little girl wears a light-coloured dress. The woman then holds the little girl up and spins around.\nThe camera then switches to the street. A boy wearing green and grey clothes has his back to the camera. 
Standing in front of this little boy is another little boy in a striped jacket and blue jeans. The boy in green and grey throws the rugby ball in his hand towards the boy in blue jeans. The little boy in blue jeans looks up at the rugby ball and raises his hands ready to catch it.\nSuddenly, a middle-aged man wearing a white shirt and khaki trousers enters from the right side of the frame. The middle-aged man catches the football with both hands and then turns to walk towards the boy in blue jeans. He touches the head of the boy in blue jeans with his left hand and puts his arm around the boy's shoulder. The man wraps his arm around the boy in blue jeans and smiles as he walks towards the boy in green and grey.\nThe camera sways. Some old buildings and streetlights can be seen in the background. At the end of the video, the American flag logo, three stars and a text appear in the subtitles in the lower left corner of the screen. The text reads \"BRAUN MAYOR\".", "events": [ { "event": "The video begins with a woman in a light-coloured dress standing on the grass. She smiles and holds up a little girl with her hands. The little girl wears a light-coloured dress. The woman then holds the little girl up and spins around.", "visual_elements": [ { "content": "A woman is in a light-coloured dress.", "type": "attribute", "weight": 2 }, { "content": "The woman stands on the grass.", "type": "attribute", "weight": 1 }, { "content": "She smiles and holds up a little girl with her hands.", "type": "action", "weight": 3 }, { "content": "The little girl wears a light-coloured dress.", "type": "attribute", "weight": 2 }, { "content": "The woman holds the little girl up and spins around.", "type": "action", "weight": 3 } ] }, { "event": "The camera then switches to the street. A boy wearing green and grey clothes has his back to the camera. Standing in front of this little boy is another little boy in a striped jacket and blue jeans. The boy in green and grey throws the rugby ball in his hand towards the boy in blue jeans. The little boy in blue jeans looks up at the rugby ball and raises his hands ready to catch it.", "visual_elements": [ { "content": "The camera switches to the street.", "type": "camera", "weight": 3 }, { "content": "A boy wearing green and grey clothes has his back to the camera.", "type": "attribute", "weight": 2 }, { "content": "Standing in front of this little boy is another little boy in a striped jacket and blue jeans.", "type": "attribute", "weight": 2 }, { "content": "The boy in green and grey throws the rugby ball in his hand towards the boy in blue jeans.", "type": "action", "weight": 3 }, { "content": "The little boy in blue jeans looks up at the rugby ball.", "type": "action", "weight": 2 }, { "content": "The little boy in blue jeans raises his hands ready to catch the ball.", "type": "action", "weight": 2 } ] }, { "event": "Suddenly, a middle-aged man wearing a white shirt and khaki trousers enters from the right side of the frame. The middle-aged man catches the football with both hands and then turns to walk towards the boy in blue jeans. He touches the head of the boy in blue jeans with his left hand and puts his arm around the boy's shoulder. 
The man wraps his arm around the boy in blue jeans and smiles as he walks towards the boy in green and grey.", "visual_elements": [ { "content": "A middle-aged man wearing a white shirt and khaki trousers enters from the right side of the frame.", "type": "attribute", "weight": 3 }, { "content": "The middle-aged man catches the football with both hands.", "type": "action", "weight": 3 }, { "content": "The middle-aged man turns to walk towards the boy in blue jeans.", "type": "action", "weight": 3 }, { "content": "The man touches the head of the boy in blue jeans with his left hand and puts his arm around the boy's shoulder.", "type": "action", "weight": 3 }, { "content": "The man wraps his arm around the boy in blue jeans and smiles as he walks towards the boy in green and grey.", "type": "action", "weight": 3 } ] }, { "event": "The camera sways. Some old buildings and streetlights can be seen in the background. At the end of the video, the American flag logo, three stars and a text appear in the subtitles in the lower left corner of the screen. The text reads \"BRAUN MAYOR\".", "visual_elements": [ { "content": "The camera sways.", "type": "camera", "weight": 3 }, { "content": "Some old buildings and streetlights can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "At the end of the video, the American flag logo, three stars and a text appear in the subtitles in the lower left corner of the screen.", "type": "scene", "weight": 1 }, { "content": "The text reads \"BRAUN MAYOR\".", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_0092", "video_path": "DREAM-1K/340.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 7.62, "resolution": { "width": 1920, "height": 800 }, "caption": "The video begins with the camera focusing on a man in a cowboy outfit. He wears a wide-brimmed cowboy hat, a black printed scarf around his neck, and a pair of gloves on his hands. In the background are blurred green leaves and a blue sky. The man looks down and lifts his left hand to take off the hat.\nThe camera switches to another scene. At this point, a woman on a horse appears in the background. The woman wears a sleeveless white vest and her hair is dyed a bright colour. She holds a wide-brimmed hat in her left hand. The woman slowly lowers the hat in her hand. In the foreground, on the left side of the frame the man in the first frame can be seen slowly placing the removed cowboy hat onto his chest. The background is a cemetery with tombstones neatly arranged on the green grass and surrounded by thick trees.", "events": [ { "event": "The video begins with the camera focusing on a man in a cowboy outfit. He wears a wide-brimmed cowboy hat, a black printed scarf around his neck, and a pair of gloves on his hands. In the background are blurred green leaves and a blue sky. 
The man looks down and lifts his left hand to take off the hat.", "visual_elements": [ { "content": "The camera focuses on a man in a cowboy outfit.", "type": "camera", "weight": 3 }, { "content": "He wears a wide-brimmed cowboy hat, a black printed scarf around his neck, and a pair of gloves on his hands.", "type": "attribute", "weight": 3 }, { "content": "In the background are blurred green leaves and a blue sky.", "type": "scene", "weight": 2 }, { "content": "The man looks down.", "type": "attribute", "weight": 2 }, { "content": "The man lifts his left hand to take off the hat.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to another scene. At this point, a woman on a horse appears in the background. The woman wears a sleeveless white vest and her hair is dyed a bright colour. She holds a wide-brimmed hat in her left hand. The woman slowly lowers the hat in her hand. In the foreground, on the left side of the frame the man in the first frame can be seen slowly placing the removed cowboy hat onto his chest. The background is a cemetery with tombstones neatly arranged on the green grass and surrounded by thick trees.", "visual_elements": [ { "content": "The camera switches to another scene.", "type": "camera", "weight": 3 }, { "content": "A woman on a horse appears in the background.", "type": "attribute", "weight": 3 }, { "content": "The woman wears a sleeveless white vest and her hair is dyed a bright colour.", "type": "attribute", "weight": 2 }, { "content": "She holds a wide-brimmed hat in her left hand.", "type": "attribute", "weight": 2 }, { "content": "The woman slowly lowers the hat in her hand.", "type": "action", "weight": 3 }, { "content": "In the foreground, on the left side of the frame the man in the first frame can be seen slowly placing the removed cowboy hat onto his chest.", "type": "action", "weight": 3 }, { "content": "The background is a cemetery with tombstones neatly arranged on the green grass and surrounded by thick trees.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0093", "video_path": "DREAM-1K/341.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 7.88, "resolution": { "width": 1920, "height": 864 }, "caption": "The video begins with the frame divided into left and right parts. In the left frame, a man with short hair wears a light yellow turtleneck and headphones with a black microphone on them. He is working carefully with his head down. Other staff members can be seen in the background. The person next to the man wears a white shirt. The man looks up.\nThe right frame shows a rocket being launched, with bright flames and thick smoke shooting out of the tail of the rocket. The rocket is rising rapidly with a dark blue sky in the background. The upward trajectory of the rocket is clearly visible, with a long tail of flame and smoke behind the rocket.\nThe rocket in the right frame fills the whole frame. The rocket continues to rise and begins to detach, with some part of the rocket separating from the main body. The rocket forms a bright light and continues to rise.", "events": [ { "event": "The video begins with the frame divided into left and right parts. In the left frame, a man with short hair wears a light yellow turtleneck and headphones with a black microphone on them. He is working carefully with his head down. Other staff members can be seen in the background. The person next to the man wears a white shirt. 
The man looks up.", "visual_elements": [ { "content": "The frame is divided into left and right parts.", "type": "camera", "weight": 3 }, { "content": "In the left frame, a man with short hair wears a light yellow turtleneck and headphones with a black microphone on them.", "type": "attribute", "weight": 3 }, { "content": "He is working carefully with his head down.", "type": "action", "weight": 2 }, { "content": "The person next to the man wears a white shirt.", "type": "attribute", "weight": 1 }, { "content": "The man looks up.", "type": "action", "weight": 3 } ] }, { "event": "The right frame shows a rocket being launched, with bright flames and thick smoke shooting out of the tail of the rocket. The rocket is rising rapidly with a dark blue sky in the background. The upward trajectory of the rocket is clearly visible, with a long tail of flame and smoke behind the rocket.", "visual_elements": [ { "content": "The right frame shows a rocket being launched.", "type": "attribute", "weight": 3 }, { "content": "Bright flames and thick smoke are shooting out of the tail of the rocket.", "type": "attribute", "weight": 3 }, { "content": "The rocket is rising rapidly.", "type": "action", "weight": 2 }, { "content": "A dark blue sky is in the background.", "type": "scene", "weight": 1 }, { "content": "The upward trajectory of the rocket is clearly visible, with a long tail of flame and smoke behind the rocket.", "type": "attribute", "weight": 2 } ] }, { "event": "The rocket in the right frame fills the whole frame. The rocket continues to rise and begins to detach, with some part of the rocket separating from the main body. The rocket forms a bright light and continues to rise.", "visual_elements": [ { "content": "The rocket in the right frame fills the whole frame.", "type": "camera", "weight": 3 }, { "content": "The rocket continues to rise.", "type": "action", "weight": 2 }, { "content": "The rocket begins to detach, with some part of the rocket separating from the main body.", "type": "action", "weight": 3 }, { "content": "The rocket forms a bright light and continues to rise.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0094", "video_path": "DREAM-1K/342.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 6.62, "resolution": { "width": 1920, "height": 864 }, "caption": "In the first group of pictures, the scene shows a family outdoors. In the background is a house with brick exterior walls and large windows. In front of the room is a set of swings with five figures on them. The child on the left wears a striped long-sleeved shirt and trousers. The child holds onto the crossbar of the swing with both hands as his body is suspended in the air, his right leg bends, and his left leg straightens to the left side of the frame. The child in the centre stands on the swing seat, wearing a long-sleeved shirt and trousers. A man stands on the left of the boy. The man wears a short-sleeved shirt and trousers with his hands on the swing chain as he looks down to his left. The child on the far right of the picture stands on a bench next to another end of the swing. The child wears a striped sleeveless shirt and trousers, with his hand grasping the crossbar of the swing. Behind the child on the far right sits a woman wearing a short-sleeved shirt and trousers, with her hands resting on her knees. 
A dog is on the ground on the left side of the frame, with its ears pricked up.\nThe scene shifts to show an indoor scene with a control room in the background with multiple dashboards and equipment on the walls. There are three men in the foreground, wearing white coveralls and hats. The man in the middle wears a spacesuit which has multiple connecting tubes and equipment. The man on the left stands behind the astronaut and helps adjust his spacesuit with both hands. The man on the right stands in front of the astronaut with his hands on the astronaut's chest checking or adjusting the equipment on the suit. The camera moves downwards. The astronaut lowers his head and then raises his right hand which wears a black glove and watch. The astronaut then moves his right hand and then puts it down. Behind the three men is a staff member in white overalls, leaving towards the left of the frame.", "events": [ { "event": "In the first group of pictures, the scene shows a family outdoors. In the background is a house with brick exterior walls and large windows. In front of the room is a set of swings with five figures on them. The child on the left wears a striped long-sleeved shirt and trousers. The child holds onto the crossbar of the swing with both hands as his body is suspended in the air, his right leg bends, and his left leg straightens to the left side of the frame. The child in the centre stands on the swing seat, wearing a long-sleeved shirt and trousers. A man stands on the left of the boy. The man wears a short-sleeved shirt and trousers with his hands on the swing chain as he looks down to his left. The child on the far right of the picture stands on a bench next to another end of the swing. The child wears a striped sleeveless shirt and trousers, with his hand grasping the crossbar of the swing. Behind the child on the far right sits a woman wearing a short-sleeved shirt and trousers, with her hands resting on her knees. 
A dog is on the ground on the left side of the frame, with its ears pricked up.", "visual_elements": [ { "content": "In the first group of pictures, the scene shows a family outdoors.", "type": "scene", "weight": 2 }, { "content": "In the background is a house with brick exterior walls and large windows.", "type": "scene", "weight": 1 }, { "content": "In front of the room is a set of swings with five figures on them.", "type": "attribute", "weight": 3 }, { "content": "The child on the left wears a striped long-sleeved shirt and trousers.", "type": "attribute", "weight": 2 }, { "content": "The child on the left holds onto the crossbar of the swing with both hands as his body is suspended in the air, his right leg bends, and his left leg straightens to the left side of the frame.", "type": "attribute", "weight": 2 }, { "content": "The child in the centre stands on the swing seat, wearing a long-sleeved shirt and trousers.", "type": "attribute", "weight": 2 }, { "content": "A man stands on the left of the boy and wears a short-sleeved shirt and trousers.", "type": "attribute", "weight": 2 }, { "content": "The man puts his hands on the swing chain as he looks down to his left.", "type": "attribute", "weight": 2 }, { "content": "The child on the far right of the picture stands on a bench next to another end of the swing and wears a striped sleeveless shirt and trousers.", "type": "attribute", "weight": 2 }, { "content": "The child on the bench grasps the crossbar of the swing with his hands.", "type": "attribute", "weight": 2 }, { "content": "Behind the child on the far right sits a woman wearing a short-sleeved shirt and trousers.", "type": "attribute", "weight": 2 }, { "content": "The woman rests her hands on her knees.", "type": "attribute", "weight": 2 }, { "content": "A dog is on the ground on the left side of the frame.", "type": "attribute", "weight": 1 }, { "content": "The dog's ears are pricked up.", "type": "attribute", "weight": 1 } ] }, { "event": "The scene shifts to show an indoor scene with a control room in the background with multiple dashboards and equipment on the walls. There are three men in the foreground, wearing white coveralls and hats. The man in the middle wears a spacesuit which has multiple connecting tubes and equipment. The man on the left stands behind the astronaut and helps adjust his spacesuit with both hands. The man on the right stands in front of the astronaut with his hands on the astronaut's chest checking or adjusting the equipment on the suit. The camera moves downwards. The astronaut lowers his head and then raises his right hand which wears a black glove and watch. The astronaut then moves his right hand and then puts it down. 
Behind the three men is a staff member in white overalls, leaving towards the left of the frame.", "visual_elements": [ { "content": "The scene shifts to show an indoor scene.", "type": "camera", "weight": 3 }, { "content": "The background is a control room with multiple dashboards and equipment on the walls.", "type": "scene", "weight": 1 }, { "content": "There are three men in the foreground, wearing white coveralls and hats.", "type": "attribute", "weight": 3 }, { "content": "The man in the middle wears a spacesuit which has multiple connecting tubes and equipment.", "type": "attribute", "weight": 3 }, { "content": "The man on the left stands behind the astronaut.", "type": "attribute", "weight": 2 }, { "content": "The man on the left helps adjust his spacesuit with both hands.", "type": "action", "weight": 3 }, { "content": "The man on the right stands in front of the astronaut.", "type": "attribute", "weight": 3 }, { "content": "The man on the right puts his hands on the astronaut's chest to check or adjust the equipment on the suit.", "type": "action", "weight": 3 }, { "content": "The astronaut lowers his head.", "type": "action", "weight": 3 }, { "content": "The camera moves downwards.", "type": "camera", "weight": 3 }, { "content": "The man raises his right hand which wears a black glove and watch.", "type": "action", "weight": 3 }, { "content": "The astronaut moves his right hand.", "type": "action", "weight": 2 }, { "content": "The astronaut puts his right hand down.", "type": "action", "weight": 2 }, { "content": "Behind the three men is a staff member in white overalls.", "type": "attribute", "weight": 1 }, { "content": "The staff member behind leaves towards the left of the frame.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 29 }, { "index": "TUNA_0095", "video_path": "DREAM-1K/343.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 7.88, "resolution": { "width": 1904, "height": 1024 }, "caption": "The video begins with the camera focusing on a kitchen, which is dim with several fluorescent lights in the ceiling. The kitchen is flanked by metal shelves and workbenches with a variety of cookware and ingredients on them. In the middle of the frame is a man wearing a light-coloured short-sleeved shirt and dark sweatpants. The man stands in front of a refrigerator whose door is open.\nThe man holds several items in his left hand, and takes out a rectangular box from inside the refrigerator with his right hand. The man then places the box on a metal bucket in his left hand and holds the box with his chin. The man then closes the refrigerator with his right hand and turns around to walk towards the table behind him. Next, the man takes a bottle of milk from the table with his right hand and turns around to walk towards the left side of the frame.", "events": [ { "event": "The video begins with the camera focusing on a kitchen, which is dim with several fluorescent lights in the ceiling. The kitchen is flanked by metal shelves and workbenches with a variety of cookware and ingredients on them. In the middle of the frame is a man wearing a light-coloured short-sleeved shirt and dark sweatpants. 
The man stands in front of a refrigerator whose door is open.", "visual_elements": [ { "content": "The camera focuses on a kitchen.", "type": "camera", "weight": 3 }, { "content": "The kitchen is dim with several fluorescent lights in the ceiling.", "type": "scene", "weight": 1 }, { "content": "The kitchen is flanked by metal shelves and workbenches with a variety of cookware and ingredients on them.", "type": "scene", "weight": 1 }, { "content": "In the middle of the frame is a man.", "type": "attribute", "weight": 3 }, { "content": "The man wears a light-coloured short-sleeved shirt and dark sweatpants.", "type": "attribute", "weight": 2 }, { "content": "The man stands in front of a refrigerator whose door is open.", "type": "attribute", "weight": 3 } ] }, { "event": "The man holds several items in his left hand, and takes out a rectangular box from inside the refrigerator with his right hand. The man then places the box on a metal bucket in his left hand and holds the box with his chin. The man then closes the refrigerator with his right hand and turns around to walk towards the table behind him. Next, the man takes a bottle of milk from the table with his right hand and turns around to walk towards the left side of the frame.", "visual_elements": [ { "content": "The man holds several items in his left hand.", "type": "attribute", "weight": 2 }, { "content": "The man takes out a rectangular box from inside the refrigerator with his right hand.", "type": "action", "weight": 3 }, { "content": "The man places the box on a metal bucket in his left hand.", "type": "action", "weight": 3 }, { "content": "The man holds the box with his chin.", "type": "action", "weight": 3 }, { "content": "The man closes the refrigerator with his right hand.", "type": "action", "weight": 3 }, { "content": "The man turns around to walk towards the table behind him.", "type": "action", "weight": 3 }, { "content": "The man takes a bottle of milk from the table with his right hand.", "type": "action", "weight": 3 }, { "content": "The man turns around to walk towards the left side of the frame.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0096", "video_path": "DREAM-1K/344.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 7.0, "resolution": { "width": 1904, "height": 1024 }, "caption": "The video begins with the camera focusing on a boxing ring where two male boxers are engaged in a fierce battle. Both men are bare-chested and have white bandages on their hands. In the background, red ropes can be seen surrounding the ring and spectators outside the ring are watching the fight.\nThe camera first focuses on a boxer with a buzz cut. In the foreground is the back of the boxer with a buzz cut. The boxer with a buzz cut leans forward and throws a punch at his opponent opposite him.\nThe camera switches to the back of the boxer with a buzz cut. The opponent of the boxer with a buzz cut bends over to avoid the attack. The boxer with a buzz cut wears red shorts, while his opponent wears black shorts.\nThe camera switches to an overhead view again, showing the panoramic view of the boxing ring. The boxer with a buzz cut in red shorts then throws the boxer in black shorts to the ground with both hands. The auditorium around the ring is filled up with people standing up and waving their arms. The referee immediately steps forward and reaches out to stop the boxer in red shorts. 
The boxer in red shorts moves to his right. The boxer in red shorts looks at his fallen opponent. The boxer in black shorts climbs a few steps in the ring before getting up and moving forward.", "events": [ { "event": "The video begins with the camera focusing on a boxing ring where two male boxers are engaged in a fierce battle. Both men are bare-chested and have white bandages on their hands. In the background, red ropes can be seen surrounding the ring and spectators outside the ring are watching the fight.", "visual_elements": [ { "content": "The camera focuses on a boxing ring.", "type": "camera", "weight": 3 }, { "content": "Two male boxers are engaged in a fierce battle.", "type": "action", "weight": 3 }, { "content": "Both men are bare-chested and have white bandages on their hands.", "type": "attribute", "weight": 2 }, { "content": "In the background, red ropes can be seen surrounding the ring and spectators outside the ring are watching the fight.", "type": "scene", "weight": 1 } ] }, { "event": "The camera first focuses on a boxer with a buzz cut. In the foreground is the back of the boxer with a buzz cut. The boxer with a buzz cut leans forward and throws a punch at his opponent opposite him.", "visual_elements": [ { "content": "The camera first focuses on a boxer with a buzz cut.", "type": "camera", "weight": 3 }, { "content": "In the foreground is the back of the boxer with a buzz cut.", "type": "action", "weight": 3 }, { "content": "The boxer with a buzz cut leans forward and throws a punch at his opponent opposite him.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the back of the boxer with a buzz cut. The opponent of the boxer with a buzz cut bends over to avoid the attack. The boxer with a buzz cut wears red shorts, while his opponent wears black shorts.", "visual_elements": [ { "content": "The camera switches to the back of the boxer with a buzz cut.", "type": "camera", "weight": 3 }, { "content": "The opponent of the boxer with a buzz cut bends over to avoid the attack.", "type": "action", "weight": 3 }, { "content": "The boxer with a buzz cut wears red shorts.", "type": "action", "weight": 3 }, { "content": "The opponent of the boxer with a buzz cut wears black shorts.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to an overhead view again, showing the panoramic view of the boxing ring. The boxer with a buzz cut in red shorts then throws the boxer in black shorts to the ground with both hands. The auditorium around the ring is filled up with people standing up and waving their arms. The referee immediately steps forward and reaches out to stop the boxer in red shorts. The boxer in red shorts moves to his right. The boxer in red shorts looks at his fallen opponent. 
The boxer in black shorts climbs a few steps in the ring before getting up and moving forward.", "visual_elements": [ { "content": "The camera switches to an overhead view again, showing the panoramic view of the boxing ring.", "type": "camera", "weight": 3 }, { "content": "The boxer with a buzz cut in red shorts throws the boxer in black shorts to the ground with both hands.", "type": "action", "weight": 3 }, { "content": "The auditorium around the ring is filled up with people standing up and waving their arms.", "type": "scene", "weight": 2 }, { "content": "The referee immediately steps forward and reaches out to stop the boxer in red shorts.", "type": "action", "weight": 3 }, { "content": "The boxer in red shorts moves to his right.", "type": "action", "weight": 2 }, { "content": "The boxer in red shorts looks at his fallen opponent.", "type": "action", "weight": 1 }, { "content": "The boxer in black shorts climbs a few steps in the ring before getting up and moving forward.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0097", "video_path": "DREAM-1K/345.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 6.75, "resolution": { "width": 1904, "height": 1024 }, "caption": "The video begins with a shot of a man sitting on a sofa and wearing a light-coloured T-shirt and shorts. A laptop computer sits on the table in front of him and a boxing match is shown on the screen. He turns his head to look at the computer while stroking a dog under the table with his right hand. The dog moves every now and then. A bathtub can be seen in the background with a pair of sneakers next to it. A woman sits on a large bed behind the bathtub. The decoration of the room is very luxurious with columns and statues, murals on the walls, and soft lighting in the room. In the background there is a man in a red shirt and dark trousers by the window on the right side of the frame. The man bends over and lights a cigarette for himself.\nThe camera switches to a close-up of the man by the window. The man is smoking a cigarette and looking out of the window. In the background a woman can be seen on a bed. The woman wears light-coloured clothes and is tidying up items on the bed.", "events": [ { "event": "The video begins with a shot of a man sitting on a sofa and wearing a light-coloured T-shirt and shorts. A laptop computer sits on the table in front of him and a boxing match is shown on the screen. He turns his head to look at the computer while stroking a dog under the table with his right hand. The dog moves every now and then. A bathtub can be seen in the background with a pair of sneakers next to it. A woman sits on a large bed behind the bathtub. The decoration of the room is very luxurious with columns and statues, murals on the walls, and soft lighting in the room. In the background there is a man in a red shirt and dark trousers by the window on the right side of the frame. 
The man bends over and lights a cigarette for himself.", "visual_elements": [ { "content": "A man sits on a sofa.", "type": "attribute", "weight": 3 }, { "content": "The man wears a light-coloured T-shirt and shorts.", "type": "attribute", "weight": 1 }, { "content": "A laptop computer sits on the table.", "type": "scene", "weight": 3 }, { "content": "A boxing match is shown on the computer screen.", "type": "scene", "weight": 2 }, { "content": "The man turns his head to look at the computer.", "type": "action", "weight": 3 }, { "content": "The man strokes a dog under the table with his right hand.", "type": "action", "weight": 3 }, { "content": "The dog moves every now and then.", "type": "action", "weight": 2 }, { "content": "A bathtub can be seen in the background with a pair of sneakers next to it.", "type": "scene", "weight": 1 }, { "content": "A woman sits on a large bed behind the bathtub.", "type": "scene", "weight": 1 }, { "content": "The decoration of the room is very luxurious with columns and statues, murals on the walls, and soft lighting in the room.", "type": "scene", "weight": 1 }, { "content": "In the background there is a man in a red shirt and dark trousers by the window on the right side of the frame.", "type": "scene", "weight": 2 }, { "content": "The man bends over and lights a cigarette for himself.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to a close-up of the man by the window. The man is smoking a cigarette and looking out of the window. In the background a woman can be seen on a bed. The woman wears light-coloured clothes and is tidying up items on the bed.", "visual_elements": [ { "content": "The camera switches to a close-up of the man by the window.", "type": "action", "weight": 2 }, { "content": "The man is smoking a cigarette and looking out of the window.", "type": "action", "weight": 2 }, { "content": "In the background a bed can be seen.", "type": "scene", "weight": 2 }, { "content": "A woman is on the bed.", "type": "attribute", "weight": 2 }, { "content": "The woman wears light-coloured clothes.", "type": "attribute", "weight": 1 }, { "content": "The woman is tidying up items on the bed.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 18 }, { "index": "TUNA_0098", "video_path": "DREAM-1K/346.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 6.38, "resolution": { "width": 1904, "height": 1024 }, "caption": "The video begins with the camera focusing on a corner of a restaurant where three people sit at a table. The man on the left wears a dark short-sleeved shirt. He rests his bandaged left hand on the table and puts his right hand on the back of his chair as he looks at the person opposite him. The man sitting opposite him wears a blue long-sleeved shirt and puts his hands on the table with a red plastic cup in front of him. A woman wears a light-coloured short-sleeved top with a cross pattern on it. She rests her hands on her legs and a red plastic cup is also in front of her. The woman is weeping with her head down and is holding a leash attached to a dog in her left hand. The woman then raises her right hand to wipe the tears from her face. The man in blue long sleeves turns his head slightly to look at the right. Other customers in the restaurant can be seen in the background, some chatting and others eating. 
The restaurant is decorated in a vintage style, with some decorations hanging on the walls and colourful balloons hanging from the ceiling.\nAfter a while, a waiter comes over from the left with a large plate in his hand. The waiter puts the plate in the middle of the table, and on the plate is a large piece of pizza. The waiter puts the pizza down and turns to leave.\nThe man on the left turns his head to the right and speaks. The man in the blue long sleeve turns his head to look at the pizza and rolls his eyes every now and then. The woman raises her right hand again to wipe the tears from her face. The dog under the table moves its head every now and then.", "events": [ { "event": "The video begins with the camera focusing on a corner of a restaurant where three people sit at a table. The man on the left wears a dark short-sleeved shirt. He rests his bandaged left hand on the table and puts his right hand on the back of his chair as he looks at the person opposite him. The man sitting opposite him wears a blue long-sleeved shirt and puts his hands on the table with a red plastic cup in front of him. A woman wears a light-coloured short-sleeved top with a cross pattern on it. She rests her hands on her legs and a red plastic cup is also in front of her. The woman is weeping with her head down and is holding a leash attached to a dog in her left hand. The woman then raises her right hand to wipe the tears from her face. The man in blue long sleeves turns his head slightly to look at the right. Other customers in the restaurant can be seen in the background, some chatting and others eating. The restaurant is decorated in a vintage style, with some decorations hanging on the walls and colourful balloons hanging from the ceiling.", "visual_elements": [ { "content": "The camera focuses on a corner of a restaurant.", "type": "camera", "weight": 3 }, { "content": "Three people sit at a table.", "type": "attribute", "weight": 3 }, { "content": "The man on the left wears a dark short-sleeved shirt.", "type": "attribute", "weight": 2 }, { "content": "The man rests his bandaged left hand on the table.", "type": "attribute", "weight": 2 }, { "content": "The man puts his right hand on the back of his chair.", "type": "attribute", "weight": 2 }, { "content": "The man looks at the person opposite him.", "type": "attribute", "weight": 2 }, { "content": "The man sitting opposite wears a blue long-sleeved shirt.", "type": "attribute", "weight": 2 }, { "content": "The man in the blue long-sleeved shirt puts his hands on the table.", "type": "attribute", "weight": 2 }, { "content": "A red plastic cup is in front of the man in the blue long-sleeved shirt.", "type": "attribute", "weight": 2 }, { "content": "The woman on the left of the man in the blue long-sleeved shirt wears a light-coloured short-sleeved top with a cross pattern on it.", "type": "attribute", "weight": 1 }, { "content": "The woman rests her hands on her legs.", "type": "attribute", "weight": 1 }, { "content": "A red plastic cup is also in front of the woman.", "type": "attribute", "weight": 2 }, { "content": "The woman is weeping with her head down.", "type": "action", "weight": 2 }, { "content": "The woman is holding a leash attached to a dog in her left hand.", "type": "attribute", "weight": 1 }, { "content": "The woman raises her right hand to wipe the tears from her face.", "type": "action", "weight": 3 }, { "content": "The man in blue long sleeves turns his head slightly to look at the right.", "type": "action", "weight": 2 }, { 
"content": "Other customers in the restaurant can be seen in the background, some chatting and others eating.", "type": "scene", "weight": 1 }, { "content": "The restaurant is decorated in a vintage style, with some decorations hanging on the walls and colourful balloons hanging from the ceiling.", "type": "scene", "weight": 1 } ] }, { "event": "After a while, a waiter comes over from the left with a large plate in his hand. The waiter puts the plate in the middle of the table, and on the plate is a large piece of pizza. The waiter puts the pizza down and turns to leave.", "visual_elements": [ { "content": "A waiter comes over from the left.", "type": "action", "weight": 3 }, { "content": "The waiter holds a large plate in his hand.", "type": "action", "weight": 2 }, { "content": "The waiter puts the plate in the middle of the table.", "type": "action", "weight": 2 }, { "content": "On the plate is a large piece of pizza.", "type": "attribute", "weight": 2 }, { "content": "The waiter puts the pizza down.", "type": "action", "weight": 2 }, { "content": "The waiter turns to leave.", "type": "action", "weight": 2 } ] }, { "event": "The man on the left turns his head to the right and speaks. The man in the blue long sleeve turns his head to look at the pizza and rolls his eyes every now and then. The woman raises her right hand again to wipe the tears from her face. The dog under the table moves its head every now and then.", "visual_elements": [ { "content": "The man on the left turns his head to the right and speaks.", "type": "action", "weight": 3 }, { "content": "The man in the blue long sleeve turns his head to look at the pizza.", "type": "action", "weight": 2 }, { "content": "The man in the blue long sleeve rolls his eyes every now and then.", "type": "action", "weight": 1 }, { "content": "The woman raises her right hand again to wipe the tears from her face.", "type": "action", "weight": 3 }, { "content": "The dog under the table moves its head every now and then.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 29 }, { "index": "TUNA_0099", "video_path": "DREAM-1K/347.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 11.88, "resolution": { "width": 1872, "height": 800 }, "caption": "The video begins with the camera showing a prison. A woman in a blue uniform stands by the iron gate of the prison and holds a set of keys. A man in a grey uniform stands on the other side of the iron gate with his left hand on it and his head turned to look into the room. A number of notices and signs can be seen hanging on the walls in the background, and the lighting is rather dim. In the bottom right corner of the frame, a blurred woman sits in the room. The woman standing by the gate then takes a few steps back. The woman sitting in the room prepares to stand up.\nNext, the camera switches to the interior of a prison cell which has straw mats on the floor. A woman in a grey shirt and black trousers crouches in the corner. She stands up holding onto the wall behind her and walks towards the iron gate. Looking down from above, the camera shows the layout of the cell, including a sink and a blue bucket.\nThe camera then switches back to the outside of the gate again. The woman in black trousers stops at the gate and looks to her right. The woman in the blue uniform grabs the left arm of the woman in the black trousers with her right hand. 
The man in the grey uniform closes the iron gate.\nThe camera switches to a corridor and pans back. Three people walk down the middle of the corridor. The woman in the blue uniform walks in the front as she grabs the woman in the black trousers on the right. The man in the grey uniform walks behind.", "events": [ { "event": "The video begins with the camera showing a prison. A woman in a blue uniform stands by the iron gate of the prison and holds a set of keys. A man in a grey uniform stands on the other side of the iron gate with his left hand on it and his head turned to look into the room. A number of notices and signs can be seen hanging on the walls in the background, and the lighting is rather dim. In the bottom right corner of the frame, a blurred woman sits in the room. The woman standing by the gate then takes a few steps back. The woman sitting in the room prepares to stand up.", "visual_elements": [ { "content": "The camera shows a prison.", "type": "scene", "weight": 2 }, { "content": "A woman in a blue uniform stands by the iron gate of the prison.", "type": "attribute", "weight": 3 }, { "content": "The woman in the blue uniform holds a set of keys.", "type": "attribute", "weight": 3 }, { "content": "A man in a grey uniform stands on the other side of the iron gate.", "type": "attribute", "weight": 3 }, { "content": "The man rests his left hand on the iron gate.", "type": "attribute", "weight": 2 }, { "content": "The man turns his head to look into the room.", "type": "attribute", "weight": 2 }, { "content": "A number of notices and signs can be seen hanging on the walls in the background, and the lighting is rather dim.", "type": "scene", "weight": 1 }, { "content": "In the bottom right corner of the frame, a blurred woman sits in the room.", "type": "attribute", "weight": 2 }, { "content": "The woman standing by the gate takes a few steps back.", "type": "action", "weight": 3 }, { "content": "The woman sitting in the room prepares to stand up.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera switches to the interior of a prison cell which has straw mats on the floor. A woman in a grey shirt and black trousers crouches in the corner. She stands up holding onto the wall behind her and walks towards the iron gate. Looking down from above, the camera shows the layout of the cell, including a sink and a blue bucket.", "visual_elements": [ { "content": "The camera switches to the interior of a prison cell.", "type": "camera", "weight": 3 }, { "content": "There are straw mats on the floor.", "type": "scene", "weight": 1 }, { "content": "A woman in a grey shirt and black trousers crouches in the corner.", "type": "attribute", "weight": 3 }, { "content": "She stands up holding onto the wall behind her and walks towards the iron gate.", "type": "action", "weight": 3 }, { "content": "Looking down from above, the camera shows the layout of the cell, including a sink and a blue bucket.", "type": "scene", "weight": 1 } ] }, { "event": "The camera then switches back to the outside of the gate again. The woman in black trousers stops at the gate and looks to her right. The woman in the blue uniform grabs the left arm of the woman in the black trousers with her right hand. 
The man in the grey uniform closes the iron gate.", "visual_elements": [ { "content": "The camera switches back to the outside of the gate again.", "type": "camera", "weight": 3 }, { "content": "The woman in black trousers stops at the gate.", "type": "action", "weight": 3 }, { "content": "The woman in black trousers looks to her right.", "type": "action", "weight": 3 }, { "content": "The woman in the blue uniform grabs the left arm of the woman in the black trousers with her right hand.", "type": "action", "weight": 3 }, { "content": "The man in the grey uniform closes the iron gate.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to a corridor and pans back. Three people walk down the middle of the corridor. The woman in the blue uniform walks in the front as she grabs the woman in the black trousers on the right. The man in the grey uniform walks behind.", "visual_elements": [ { "content": "The camera switches to a corridor.", "type": "camera", "weight": 3 }, { "content": "The camera pans back.", "type": "camera", "weight": 3 }, { "content": "Three people walk down the middle of the corridor.", "type": "attribute", "weight": 3 }, { "content": "The woman in the blue uniform walks in the front as she grabs the woman in the black trousers on the right.", "type": "action", "weight": 2 }, { "content": "The man in the grey uniform walks behind.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0100", "video_path": "DREAM-1K/348.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 5.62, "resolution": { "width": 1872, "height": 800 }, "caption": "The video begins with a man on the left of the frame wearing a blue shirt and grey trousers. He stands in front of a metal shelf with many black and white photos hanging on it. The man holds a camera and adjusts it before bending down to take a photo of the woman in front of him. The woman on the right of the frame wears a dark red coat and a green shirt. She moves her fingers slightly. The two face each other at a distance and both turn sideways to the camera. In the background, the shelf is lined with various books, papers and some metal boxes. A light above the shelf illuminates the entire scene, making the photos and items clearly visible.\nThe camera then switches to a close-up of the woman. Her expression looks a little serious as her eyes look firmly ahead and she nods towards the camera.", "events": [ { "event": "The video begins with a man on the left of the frame wearing a blue shirt and grey trousers. He stands in front of a metal shelf with many black and white photos hanging on it. The man holds a camera and adjusts it before bending down to take a photo of the woman in front of him. The woman on the right of the frame wears a dark red coat and a green shirt. She moves her fingers slightly. The two face each other at a distance and both turn sideways to the camera. In the background, the shelf is lined with various books, papers and some metal boxes. 
A light above the shelf illuminates the entire scene, making the photos and items clearly visible.", "visual_elements": [ { "content": "A man on the left of the frame wears a blue shirt and grey trousers.", "type": "attribute", "weight": 2 }, { "content": "The man stands in front of a metal shelf.", "type": "attribute", "weight": 3 }, { "content": "Many black and white photos hang on the metal shelf.", "type": "scene", "weight": 1 }, { "content": "The man holds a camera.", "type": "attribute", "weight": 3 }, { "content": "The man adjusts the camera.", "type": "action", "weight": 3 }, { "content": "The man bends down to take a photo of the woman in front of him.", "type": "action", "weight": 3 }, { "content": "The woman on the right of the frame wears a dark red coat and a green shirt.", "type": "attribute", "weight": 2 }, { "content": "The woman moves her fingers slightly.", "type": "attribute", "weight": 3 }, { "content": "The two face each other at a distance and both turn sideways to the camera.", "type": "attribute", "weight": 3 }, { "content": "The shelf is lined with various books, papers and some metal boxes.", "type": "scene", "weight": 1 }, { "content": "A light above the shelf illuminates the entire scene, making the photos and items clearly visible.", "type": "scene", "weight": 1 } ] }, { "event": "The camera then switches to a close-up of the woman. Her expression looks a little serious as her eyes look firmly ahead and she nods towards the camera.", "visual_elements": [ { "content": "The camera then switches to a close-up of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman's expression looks a little serious.", "type": "attribute", "weight": 2 }, { "content": "The woman's eyes look firmly ahead.", "type": "attribute", "weight": 2 }, { "content": "The woman nods towards the camera.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0101", "video_path": "DREAM-1K/351.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Film", "duration": 20.75, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a man seated at an outdoor cafe. He wears a light-colored plaid shirt and has neatly styled hair. The man moves his lips and falls into contemplation, gazing ahead. In the background, there are several other patrons sitting at different tables, dressed in casual attire.\nNext, the scene cuts to an indoor environment with dim lighting. The man leans against the kitchen counter, looking ahead, dressed in the same light-colored plaid shirt and shorts. His right hand holds a glass of water against his chest. The man's chest rises and falls gently with each breath. In the background of the kitchen, various household items and decorations are visible, such as pendant lights, flowerpots, and a dining table.\nThen, the camera cuts to a balcony where the man stands by a floor-to-ceiling window, facing away from the camera and gazing out at the cityscape beyond the window. The view outside is somewhat blurred, but some tall buildings and streets are visible. The weather appears gloomy, and there are traces of raindrops on the window.", "events": [ { "event": "At the beginning of the video, the camera focuses on a man seated at an outdoor cafe. He wears a light-colored plaid shirt and has neatly styled hair. The man moves his lips and falls into contemplation, gazing ahead. 
In the background, there are several other patrons sitting at different tables, dressed in casual attire.", "visual_elements": [ { "content": "The camera focuses on a man seated at an outdoor cafe.", "type": "camera", "weight": 3 }, { "content": "The man wears a light-colored plaid shirt and has neatly styled hair.", "type": "attribute", "weight": 2 }, { "content": "The man moves his lips.", "type": "action", "weight": 2 }, { "content": "The man gazes ahead, and falls into contemplation.", "type": "action", "weight": 2 }, { "content": "In the background, there are several other patrons sitting at different tables, dressed in casual attire.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the scene cuts to an indoor environment with dim lighting. The man leans against the kitchen counter, looking ahead, dressed in the same light-colored plaid shirt and shorts. His right hand holds a glass of water against his chest. The man's chest rises and falls gently with each breath. In the background of the kitchen, various household items and decorations are visible, such as pendant lights, flowerpots, and a dining table.", "visual_elements": [ { "content": "The camera cuts to an indoor environment.", "type": "camera", "weight": 3 }, { "content": "The lighting in the indoor environment is relatively dim.", "type": "scene", "weight": 1 }, { "content": "The man leans against the kitchen counter, looking ahead.", "type": "attribute", "weight": 3 }, { "content": "The man is dressed in the same light-colored plaid shirt and shorts.", "type": "attribute", "weight": 2 }, { "content": "The man's right hand holds a glass of water against his chest.", "type": "attribute", "weight": 2 }, { "content": "The man's chest rises and falls gently with each breath.", "type": "action", "weight": 1 }, { "content": "In the background of the kitchen, various household items and decorations are visible, such as pendant lights, flowerpots, and a dining table.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera cuts to a balcony where the man stands by a floor-to-ceiling window, facing away from the camera and gazing out at the cityscape beyond the window. The view outside is somewhat blurred, but some tall buildings and streets are visible. The weather appears gloomy, and there are traces of raindrops on the window.", "visual_elements": [ { "content": "The camera cuts to a balcony.", "type": "camera", "weight": 3 }, { "content": "The man stands by a floor-to-ceiling window, facing away from the camera.", "type": "attribute", "weight": 3 }, { "content": "The man gazes out at the cityscape beyond the window.", "type": "attribute", "weight": 2 }, { "content": "The view outside is somewhat blurred, but some tall buildings and streets are visible.", "type": "scene", "weight": 1 }, { "content": "The weather is somewhat gloomy, and there are traces of raindrops on the window.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0102", "video_path": "DREAM-1K/352.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 11.62, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a high-rise building, with the distant city lights visible in the nighttime background. The shot gradually pulls back, zeroing in on two balconies of the building. On the lower balcony, a man stands barefoot, wearing a light-colored shirt and dark shorts. 
He leans forward, gripping the railing with both hands. A small lamp on the balcony casts a soft glow, accompanied by a few chairs and a small table.\nOn the upper balcony, there are three people: a woman wearing a green top and dark shorts, and a little girl dressed in dark clothing. The woman holds the railing with her left hand, turning her body to look at the little girl. The little girl grasps the railing with both hands, gazing outside. Behind her, another little girl stands near the balcony doorway frame.\nThe man on the lower balcony looks to his right. Subsequently, he raises a bottle with his right hand and tilts his head back drink alcohol. Then, he leans back against the railing again.", "events": [ { "event": "At the beginning of the video, the camera focuses on a high-rise building, with the distant city lights visible in the nighttime background. The shot gradually pulls back, zeroing in on two balconies of the building. On the lower balcony, a man stands barefoot, wearing a light-colored shirt and dark shorts. He leans forward, gripping the railing with both hands. A small lamp on the balcony casts a soft glow, accompanied by a few chairs and a small table.", "visual_elements": [ { "content": "The camera focuses on a high-rise building.", "type": "camera", "weight": 3 }, { "content": "The distant city lights visible in the nighttime background.", "type": "scene", "weight": 1 }, { "content": "The shot zooms out gradually, focusing on two balconies of the building.", "type": "camera", "weight": 3 }, { "content": "A man stands barefoot on the balcony, wearing a light-colored shirt and dark shorts.", "type": "attribute", "weight": 3 }, { "content": "The man grips the railing with both hands.", "type": "attribute", "weight": 3 }, { "content": "The man leans forward.", "type": "attribute", "weight": 3 }, { "content": "There is a small lamp on the balcony, emitting a soft glow, with a few chairs and a small table nearby.", "type": "scene", "weight": 1 } ] }, { "event": "On the upper balcony, there are three people: a woman wearing a green top and dark shorts, and a little girl dressed in dark clothing. The woman holds the railing with her left hand, turning her body to look at the little girl. The little girl grasps the railing with both hands, gazing outside. Behind her, another little girl stands near the balcony doorway frame.", "visual_elements": [ { "content": "On the upper balcony, there are three people.", "type": "attribute", "weight": 3 }, { "content": "A woman wearing a green top and dark shorts。", "type": "attribute", "weight": 1 }, { "content": "A little girl next to the woman is dressed in dark clothing.", "type": "attribute", "weight": 1 }, { "content": "The woman holds the railing with her left hand.", "type": "attribute", "weight": 2 }, { "content": "The woman turnis her body to look at the little girl.", "type": "attribute", "weight": 2 }, { "content": "The little girl grasps the railing with both hands.", "type": "attribute", "weight": 2 }, { "content": "The little girl gazes outside.", "type": "attribute", "weight": 2 }, { "content": "Behind her, another little girl stands near the balcony doorway frame.", "type": "attribute", "weight": 2 } ] }, { "event": "The man on the lower balcony looks to his right. Subsequently, he raises a bottle with his right hand and tilts his head back drink alcohol. 
Then, he leans back against the railing again.", "visual_elements": [ { "content": "The man on the lower balcony looks to his right.", "type": "action", "weight": 2 }, { "content": "The man raises a liquor bottle with his right hand.", "type": "action", "weight": 3 }, { "content": "The man tilts his head back to drink alcohol.", "type": "action", "weight": 3 }, { "content": "The man leans back against the railing again.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0103", "video_path": "DREAM-1K/353.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 9.75, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on the interior of a small shop, where shelves on both sides are filled with various goods. The lighting in the shop is dim, with blue lights illuminating the shelves. At the entrance of the shop, the outside street can be seen, where a few motorcycles and pedestrians pass by.\nThere are two men in the frame. The man on the left is wearing a green long-sleeved shirt and dark pants, while the man on the right is dressed in a gray short-sleeved T-shirt and gray shorts. The man in the green shirt is holding two lunch boxes. The man in the gray T-shirt places his left hand on the glass door of the shelf and takes a lunch box from the man in the green shirt with his right hand, speaking to him.\nNext, the man in the green shirt lowers his head, cradling a lunch box in both hands as he walks into the depths of the shop and sits down by a wall. The man in the gray T-shirt also turns and follows the man in the green shirt for a few steps, then sits down to the left of him.\nThe camera pans with the actions of the two men. After sitting down, the man in the green shirt opens the lunch box in his hand. The man in the gray T-shirt, after sitting down, looks at the man in the green shirt next to him and speaks, then smiles as he leans down to open his lunch box. At this moment, the man in the green shirt turns to look at the man in the gray T-shirt.", "events": [ { "event": "At the beginning of the video, the camera focuses on the interior of a small shop, where shelves on both sides are filled with various goods. The lighting in the shop is dim, with blue lights illuminating the shelves. At the entrance of the shop, the outside street can be seen, where a few motorcycles and pedestrians pass by.", "visual_elements": [ { "content": "The camera focuses on the interior of a small shop.", "type": "camera", "weight": 3 }, { "content": "The shelves of the shop on both sides are filled with various goods.", "type": "scene", "weight": 2 }, { "content": "The lighting in the shop is dim, with blue lights illuminating the shelves.", "type": "scene", "weight": 1 }, { "content": "At the entrance of the shop, the outside street can be seen, where a few motorcycles and pedestrians pass by.", "type": "scene", "weight": 1 } ] }, { "event": "There are two men in the frame. The man on the left is wearing a green long-sleeved shirt and dark pants, while the man on the right is dressed in a gray short-sleeved T-shirt and gray shorts. The man in the green shirt is holding two lunch boxes. 
The man in the gray T-shirt places his left hand on the glass door of the shelf and takes a lunch box from the man in the green shirt with his right hand, speaking to him.", "visual_elements": [ { "content": "There are two men in the frame.", "type": "attribute", "weight": 3 }, { "content": "The man on the left is wearing a green long-sleeved shirt and dark pants.", "type": "attribute", "weight": 2 }, { "content": "The man on the right is dressed in a gray short-sleeved T-shirt and gray shorts.", "type": "attribute", "weight": 2 }, { "content": "The man in the green shirt is holding two lunch boxes.", "type": "attribute", "weight": 2 }, { "content": "The man in the gray T-shirt places his left hand on the glass door of the shelf.", "type": "attribute", "weight": 2 }, { "content": "The man in the gray T-shirt takes a lunch box from the man in the green shirt with his right hand.", "type": "action", "weight": 3 }, { "content": "The man in the gray T-shirt speaks to him.", "type": "action", "weight": 2 } ] }, { "event": "Next, the man in the green shirt lowers his head, cradling a lunch box in both hands as he walks into the depths of the shop and sits down by a wall. The man in the gray T-shirt also turns and follows the man in the green shirt for a few steps, then sits down to the left of him.", "visual_elements": [ { "content": "The man in the green shirt lowers his head.", "type": "action", "weight": 2 }, { "content": "The man in the green shirt cradles a lunch box in both hands as he walks into the depths of the shop and sits down by a wall.", "type": "action", "weight": 3 }, { "content": "The man in the gray T-shirt also turns and follows the man in the green shirt for a few steps.", "type": "action", "weight": 3 }, { "content": "The man in the gray T-shirt sits down to the left of The man in the green shirt.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans with the actions of the two men. After sitting down, the man in the green shirt opens the lunch box in his hand. The man in the gray T-shirt, after sitting down, looks at the man in the green shirt next to him and speaks, then smiles as he leans down to open his lunch box. At this moment, the man in the green shirt turns to look at the man in the gray T-shirt.", "visual_elements": [ { "content": "The camera pans with the actions of the two men.", "type": "camera", "weight": 3 }, { "content": "The man in the green shirt opens the lunch box in his hand after sitting down.", "type": "action", "weight": 3 }, { "content": "The man in the gray T-shirt, after sitting down, looks at the man in the green shirt next to him and speaks", "type": "action", "weight": 3 }, { "content": "The man in the gray T-shirt smiles as he leans down to open his lunch box.", "type": "action", "weight": 3 }, { "content": "the man in the green shirt turns to look at the man in the gray T-shirt.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_0104", "video_path": "DREAM-1K/354.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Film", "duration": 9.25, "resolution": { "width": 1920, "height": 960 }, "caption": "At the beginning of the video, the camera focuses on a corner of the kitchen. In the frame, there is a microwave with some cans and medicine bottles stacked above it. The display screen of the microwave shows a countdown, starting from 9 seconds and gradually decreasing to 8, then 7. 
Inside the microwave, a plate with food is rotating.\nNext, the camera cuts to a panoramic view of a room where there is a washing machine. A man in a gray T-shirt and shorts is looking down at a laptop computer. He leans against the table, with his left hand resting on it and his right hand operating the computer. The windows in the room are equipped with louvers, and light shines into the room through the gaps in the louvers, illuminating part of it.\nAt the end of the video, the camera cuts to the man's facial expression. He moves his lips and falls into contemplation, looking downwards.", "events": [ { "event": "At the beginning of the video, the camera focuses on a corner of the kitchen. In the frame, there is a microwave with some cans and medicine bottles stacked above it. The display screen of the microwave shows a countdown, starting from 9 seconds and gradually decreasing to 8, then 7. Inside the microwave, a plate with food is rotating.", "visual_elements": [ { "content": "The camera focuses on a corner of the kitchen.", "type": "camera", "weight": 3 }, { "content": "In the frame, there is a microwave.", "type": "attribute", "weight": 3 }, { "content": "there are some cans and medicine bottles stacked above the microwave.", "type": "attribute", "weight": 1 }, { "content": "The display screen of the microwave shows a countdown, starting from 9 seconds and gradually decreasing to 8, then 7.", "type": "action", "weight": 3 }, { "content": "Inside the microwave, a plate with food is rotating.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to a panoramic view of a room where there is a washing machine. A man in a gray T-shirt and shorts is looking down at a laptop computer. He leans against the table, with his left hand resting on it and his right hand operating the computer. The windows in the room are equipped with louvers, and light shines into the room through the gaps in the louvers, illuminating part of it.", "visual_elements": [ { "content": "The camera cuts to a panoramic view of a room.", "type": "camera", "weight": 3 }, { "content": "There is a washing machine in the room.", "type": "scene", "weight": 1 }, { "content": "A man is dressed in a gray T-shirt and shorts.", "type": "attribute", "weight": 3 }, { "content": "The man is looking down at a laptop computer.", "type": "attribute", "weight": 3 }, { "content": "The man leans against the table", "type": "attribute", "weight": 3 }, { "content": "The man props his left hand on the table.", "type": "attribute", "weight": 3 }, { "content": "The man operates the computer with his right hand.", "type": "action", "weight": 3 }, { "content": "The windows in the room are equipped with louvers, and light shines into the room through the gaps in the louvers, illuminating part of it.", "type": "scene", "weight": 1 } ] }, { "event": "At the end of the video, the camera cuts to the man's facial expression. 
He moves his lips and falls into contemplation, looking downwards.", "visual_elements": [ { "content": "The camera cuts to the man's facial expression.", "type": "camera", "weight": 3 }, { "content": "The man moves his lips.", "type": "action", "weight": 2 }, { "content": "The man falls into contemplation, looking downwards.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0105", "video_path": "DREAM-1K/355.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 11.12, "resolution": { "width": 1920, "height": 960 }, "caption": "At the beginning of the video, the camera focuses on two men standing beside a fence. The man on the left wears a brown jacket paired with a yellow T-shirt, while the man on the right is dressed in a blue jacket and sports a blue-striped wool cap. Both have their hands tucked into their pockets, gazing at the ostriches inside the fence. The backdrop is an expansive outdoor area with trees and hills in the distance. In the foreground, an ostrich within the fence turns its head to look at the two men by the fence. The man in the brown jacket shifts his gaze to the right and ahead, looking into the distance. Another ostrich appears behind them.\nThe camera cuts to the back view of the two men. More ostriches inside the fence come into view. The man in the cap turns his head to speak to the man next to him, and the man in the brown jacket responds with a smile. The man in the brown jacket occasionally turns his head back and forth.", "events": [ { "event": "At the beginning of the video, the camera focuses on two men standing beside a fence. The man on the left wears a brown jacket paired with a yellow T-shirt, while the man on the right is dressed in a blue jacket and sports a blue-striped wool cap. Both have their hands tucked into their pockets, gazing at the ostriches inside the fence. The backdrop is an expansive outdoor area with trees and hills in the distance. In the foreground, an ostrich within the fence turns its head to look at the two men by the fence. The man in the brown jacket shifts his gaze to the right and ahead, looking into the distance. 
Another ostrich appears behind them.", "visual_elements": [ { "content": "The camera focuses on two men.", "type": "camera", "weight": 3 }, { "content": "They stand beside a fence.", "type": "attribute", "weight": 3 }, { "content": "The man on the left wears a brown jacket paired with a yellow T-shirt.", "type": "attribute", "weight": 2 }, { "content": "The man on the right is dressed in a blue jacket and sports a blue-striped wool cap.", "type": "attribute", "weight": 2 }, { "content": "They have their hands tucked into their pockets.", "type": "attribute", "weight": 1 }, { "content": "They gaze at the ostriches inside the fence.", "type": "attribute", "weight": 3 }, { "content": "The backdrop is an expansive outdoor area with trees and hills in the distance.", "type": "scene", "weight": 1 }, { "content": "In the foreground, there appears an ostrich inside a fence.", "type": "scene", "weight": 3 }, { "content": "The ostrich turns its head to look at the two people by the fence.", "type": "action", "weight": 3 }, { "content": "The man in the brown jacket shifts his gaze to the right and ahead, looking into the distance.", "type": "action", "weight": 2 }, { "content": "Another ostrich appears behind them.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to the back view of the two men. More ostriches inside the fence come into view. The man in the cap turns his head to speak to the man next to him, and the man in the brown jacket responds with a smile. The man in the brown jacket occasionally turns his head back and forth.", "visual_elements": [ { "content": "The camera cuts to the back view of the two men.", "type": "camera", "weight": 3 }, { "content": "More ostriches inside the fence come into view.", "type": "scene", "weight": 2 }, { "content": "The man in the cap turns his head to speak to the man next to him.", "type": "action", "weight": 3 }, { "content": "The man in the brown jacket responds with a smile.", "type": "action", "weight": 3 }, { "content": "The man in the brown jacket occasionally turns his head back and forth.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0106", "video_path": "DREAM-1K/356.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 5.62, "resolution": { "width": 1920, "height": 960 }, "caption": "At the beginning of the video, the scene shows two men sitting on a sofa, facing away from the camera, watching television. The TV screen is displaying a movie, with a character wearing a white outfit visible on it.\nThe camera cuts to a front-facing view of the two men. The man on the left side of the screen wears a dark-colored sweater, while the man on the right wears a light-colored T-shirt. Both are intently watching the television, with the left man's arm draped over the sofa's backrest. Suddenly, the left man widens his eyes, revealing a surprised expression. Subsequently, the left man raises his left hand, and the right man raises his right hand. They both lift and lower their raised hands twice simultaneously. Finally, the right man drops his right hand, while the left man still keeps his left hand raised and turns his head to look at the right man.", "events": [ { "event": "At the beginning of the video, the scene shows two men sitting on a sofa, facing away from the camera, watching television. 
The TV screen is displaying a movie, with a character wearing a white outfit visible on it.", "visual_elements": [ { "content": "The scene shows two men sitting on a sofa.", "type": "scene", "weight": 3 }, { "content": "The two men face away from the camera.", "type": "attribute", "weight": 3 }, { "content": "The two men are watching television.", "type": "attribute", "weight": 3 }, { "content": "The TV screen is displaying a movie.", "type": "scene", "weight": 2 }, { "content": "There is a character wearing a white outfit on the screen.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts to a front-facing view of the two men. The man on the left side of the screen wears a dark-colored sweater, while the man on the right wears a light-colored T-shirt. Both are intently watching the television, with the left man's arm draped over the sofa's backrest. Suddenly, the left man widens his eyes, revealing a surprised expression. Subsequently, the left man raises his left hand, and the right man raises his right hand. They both lift and lower their raised hands twice simultaneously. Finally, the right man drops his right hand, while the left man still keeps his left hand raised and turns his head to look at the right man.", "visual_elements": [ { "content": "The camera cuts to a front-facing view of the two men.", "type": "camera", "weight": 3 }, { "content": "The man on the left side of the screen wears a dark-colored sweater.", "type": "attribute", "weight": 2 }, { "content": "The man on the right wears a light-colored T-shirt.", "type": "attribute", "weight": 2 }, { "content": "Both of them are intently watching the television.", "type": "attribute", "weight": 2 }, { "content": "The left man has his arm draped over the backrest of the sofa.", "type": "attribute", "weight": 1 }, { "content": "The left man widens his eyes, revealing a surprised expression.", "type": "action", "weight": 3 }, { "content": "The left man raises his left hand.", "type": "action", "weight": 3 }, { "content": "The right man raises his right hand.", "type": "action", "weight": 3 }, { "content": "They both lift and lower their raised hands twice.", "type": "action", "weight": 3 }, { "content": "The right man drops his right hand.", "type": "action", "weight": 3 }, { "content": "The left man still keeps his left hand raised.", "type": "action", "weight": 3 }, { "content": "The left man turns his head to look at the right man.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0107", "video_path": "DREAM-1K/357.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 5.0, "resolution": { "width": 1840, "height": 1024 }, "caption": "At the beginning of the video, the camera focuses on a large wooden double door adorned with metal hinges and a door knocker. The door is surrounded by blue-green walls that feature golden decorative patterns.\nThen the camera pans downwards and then backwards. Then, the door opens, and several figures walk in.\nFirst to appear is a little dog, closely followed by several young people dressed in various styles. Their outfits are unique, with bright colors and diverse materials. A young man dressed in white and gray stands on the left, pushing open the door on the left. In the middle is a woman wearing a tight-fitting blue outfit, with blue hair to match. On the right is a woman dressed in blue, pushing open the door on the right. 
Behind them are other young people wearing eccentric costumes. They come in and look around, observing the environment inside.\nThe camera then cuts to a view from behind them, showing that they have entered a richly decorated corridor. These young people continue walking forward, and the camera follows their steps, revealing the full view of the corridor. Along both sides of the corridor are displayed armor and shields, while the walls are adorned with various decorative patterns and lighting fixtures. At the end of the corridor is a large window featuring stained glass patterns.", "events": [ { "event": "At the beginning of the video, the camera focuses on a large wooden double door adorned with metal hinges and a door knocker. The door is surrounded by blue-green walls that feature golden decorative patterns.", "visual_elements": [ { "content": "The camera focuses on a large wooden double door.", "type": "camera", "weight": 3 }, { "content": "The door is adorned with metal hinges and a door knocker.", "type": "attribute", "weight": 2 }, { "content": "The door is surrounded by blue-green walls that feature golden decorative patterns.", "type": "attribute", "weight": 2 } ] }, { "event": "Then the camera pans downwards and then backwards. Then, the door opens, and several figures walk in.", "visual_elements": [ { "content": "The camera pans downwards and then backwards.", "type": "camera", "weight": 3 }, { "content": "The door opens.", "type": "action", "weight": 3 }, { "content": "Several figures walk in.", "type": "action", "weight": 3 } ] }, { "event": "First to appear is a little dog, closely followed by several young people dressed in various styles. Their outfits are unique, with bright colors and diverse materials. A young man dressed in white and gray stands on the left, pushing open the door on the left. In the middle is a woman wearing a tight-fitting blue outfit, with blue hair to match. On the right is a woman dressed in blue, pushing open the door on the right. Behind them are other young people wearing eccentric costumes. They come in and look around, observing the environment inside.", "visual_elements": [ { "content": "First to appear is a little dog.", "type": "attribute", "weight": 3 }, { "content": "Closely following are several young people dressed in various styles.", "type": "action", "weight": 3 }, { "content": "Their outfits are unique, with bright colors and diverse materials.", "type": "attribute", "weight": 3 }, { "content": "A young man dressed in white and gray stands on the left.", "type": "attribute", "weight": 2 }, { "content": "The young man pushes open the door on the left.", "type": "action", "weight": 2 }, { "content": "In the middle is a woman wearing a tight-fitting blue outfit, with blue hair to match.", "type": "attribute", "weight": 2 }, { "content": "On the right is a woman dressed in blue.", "type": "attribute", "weight": 2 }, { "content": "The woman on the right pushes open the door on her right.", "type": "action", "weight": 2 }, { "content": "Behind them are other young people wearing eccentric costumes.", "type": "action", "weight": 2 }, { "content": "They come in and look around, observing the environment inside.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to a view from behind them, showing that they have entered a richly decorated corridor. These young people continue walking forward, and the camera follows their steps, revealing the full view of the corridor. 
Along both sides of the corridor are displayed armor and shields, while the walls are adorned with various decorative patterns and lighting fixtures. At the end of the corridor is a large window featuring stained glass patterns.", "visual_elements": [ { "content": "The camera then cuts to a view from behind them.", "type": "camera", "weight": 3 }, { "content": "They enter a richly decorated corridor.", "type": "action", "weight": 3 }, { "content": "These young people continue walking forward.", "type": "action", "weight": 3 }, { "content": "The camera follows their steps.", "type": "camera", "weight": 3 }, { "content": "Along both sides of the corridor are displayed armor and shields.", "type": "scene", "weight": 1 }, { "content": "The walls are adorned with various decorative patterns and lighting fixtures.", "type": "scene", "weight": 1 }, { "content": "At the end of the corridor is a large window.", "type": "scene", "weight": 2 }, { "content": "There are colorful glass patterns on the window.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 24 }, { "index": "TUNA_0108", "video_path": "DREAM-1K/358.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 5.38, "resolution": { "width": 1840, "height": 1024 }, "caption": "At the beginning of the video, the camera focuses on a pair of hands wearing leather gloves adorned with metal rivets, with several bracelets on the wrists. These hands are picking black berries from a cluster of green leaves. The backdrop is lush, verdant foliage.\nNext, the camera pans upwards, revealing that the owner of the hands is a man dressed in brown leather clothing. The man has slightly curly long hair and wears a black headband. His right hand picks a berry from a tree. The man holds the picked berry in his hand and smiles at it. Then he puts the berry into his mouth. The man smiles and looks up towards the right-upper side. He slowly chews the berry, savoring its taste.", "events": [ { "event": "At the beginning of the video, the camera focuses on a pair of hands wearing leather gloves adorned with metal rivets, with several bracelets on the wrists. These hands are picking black berries from a cluster of green leaves. The backdrop is lush, verdant foliage.", "visual_elements": [ { "content": "The camera focuses on a pair of hands wearing leather gloves.", "type": "camera", "weight": 3 }, { "content": "These hands are picking black berries from a cluster of green leaves.", "type": "action", "weight": 3 }, { "content": "The gloves are adorned with metal rivet decorations.", "type": "attribute", "weight": 1 }, { "content": "There are several bracelets on the wrists.", "type": "attribute", "weight": 1 }, { "content": "The backdrop is lush, verdant foliage.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera pans upwards, revealing that the owner of the hands is a man dressed in brown leather clothing. The man has slightly curly long hair and wears a black headband. His right hand picks a berry from a tree. The man holds the picked berry in his hand and smiles at it. Then he puts the berry into his mouth. The man smiles and looks up towards the right-upper side. 
He slowly chews the berry, savoring its taste.", "visual_elements": [ { "content": "The camera pans upwards.", "type": "camera", "weight": 3 }, { "content": "The owner of the hands is a man dressed in brown leather clothing.", "type": "attribute", "weight": 2 }, { "content": "The man has slightly curly long hair and wears a black headband.", "type": "attribute", "weight": 2 }, { "content": "His right hand picks a berry from a tree.", "type": "action", "weight": 3 }, { "content": "The man holds the picked berry in his hand.", "type": "action", "weight": 2 }, { "content": "The man smiles at the berry.", "type": "attribute", "weight": 3 }, { "content": "The man puts the berry into his mouth.", "type": "action", "weight": 3 }, { "content": "The man smiles and looks up towards the right-upper side.", "type": "action", "weight": 3 }, { "content": "He slowly chews the berry, savoring its taste.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0109", "video_path": "DREAM-1K/359.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 8.62, "resolution": { "width": 1920, "height": 1024 }, "caption": "At the start of the video, the camera focuses on a foot wearing an old leather shoe, stepping on a beach covered with cobblestones. A small crab crawls on one of the stones.\nNext, the camera cuts to a distant view, showing two women sitting on the beach with a steep rock wall and the coastline in the distance as the background. One woman wears a dark-colored long dress, sitting on the ground, while the other woman is dressed in a dark coat and light-colored pants, crouching on the ground with a basket and a purse placed nearby.\nThe camera cuts to a close-up once again, revealing a woman crouching with a hammer in her hand, striking a rock. As the camera gradually pans up, we see that the crouching woman has messy hair, and she lowers her head and knocks on the stone intently and forcefully.", "events": [ { "event": "At the start of the video, the camera focuses on a foot wearing an old leather shoe, stepping on a beach covered with cobblestones. A small crab crawls on one of the stones.", "visual_elements": [ { "content": "The camera focuses on a foot wearing an old leather shoe.", "type": "camera", "weight": 3 }, { "content": "The foot steps on a beach covered with cobblestones.", "type": "attribute", "weight": 3 }, { "content": "A small crab crawls on one of the stones.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to a distant view, showing two women sitting on the beach with a steep rock wall and the coastline in the distance as the background. 
One woman wears a dark-colored long dress, sitting on the ground, while the other woman is dressed in a dark coat and light-colored pants, crouching on the ground with a basket and a purse placed nearby.", "visual_elements": [ { "content": "The camera cuts to a distant view.", "type": "camera", "weight": 3 }, { "content": "The scene shows two women sitting on the beach.", "type": "scene", "weight": 3 }, { "content": "The background features a steep rock wall and the coastline in the distance.", "type": "scene", "weight": 1 }, { "content": "One woman wears a dark-colored long dress.", "type": "attribute", "weight": 2 }, { "content": "The woman in a dark-colored long dress sits on the ground.", "type": "attribute", "weight": 3 }, { "content": "The other woman is dressed in a dark coat and light-colored pants.", "type": "attribute", "weight": 2 }, { "content": "The woman in a dark coat and light-colored pants crouches on the ground.", "type": "attribute", "weight": 3 }, { "content": "Next to the woman in a dark coat and light-colored pants, there is a basket and a purse.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a close-up once again, revealing a woman crouching with a hammer in her hand, striking a rock. As the camera gradually pans up, we see that the crouching woman has messy hair, and she lowers her head and knocks on the stone intently and forcefully.", "visual_elements": [ { "content": "The camera cuts to a close-up once again.", "type": "camera", "weight": 3 }, { "content": "The camera pans up.", "type": "camera", "weight": 3 }, { "content": "The crouching woman holds a hammer in her hand.", "type": "action", "weight": 3 }, { "content": "The crouching woman is striking a rock.", "type": "action", "weight": 3 }, { "content": "This crouching woman has messy hair.", "type": "attribute", "weight": 2 }, { "content": "This crouching woman lowers her head and knocks on the stone intently and forcefully.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0110", "video_path": "DREAM-1K/360.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 5.0, "resolution": { "width": 1920, "height": 1024 }, "caption": "At the start of the video, the camera focuses on a woman seated in a chair. The woman wears a white shirt and a black jacket, with long hair. The background is a dimly lit room, containing a table with a lit candle on it. The candle's faint light barely illuminates the surrounding environment.\nSubsequently, the woman leans forward and blows out the candle. The room becomes darker. The woman lies down on her side, covering herself with a black blanket. Finally, she adjusts her position and closes her eyes to sleep.", "events": [ { "event": "At the start of the video, the camera focuses on a woman seated in a chair. The woman wears a white shirt and a black jacket, with long hair. The background is a dimly lit room, containing a table with a lit candle on it. 
The candle's faint light barely illuminates the surrounding environment.", "visual_elements": [ { "content": "The camera focuses on a woman seated in a chair.", "type": "camera", "weight": 3 }, { "content": "The woman wears a white shirt and a black jacket, with long hair.", "type": "attribute", "weight": 2 }, { "content": "The background is a dimly lit room", "type": "scene", "weight": 2 }, { "content": "There is a table in the room, with a lit candle placed on it.", "type": "scene", "weight": 2 }, { "content": "The candle's faint light barely illuminates the surrounding environment.", "type": "scene", "weight": 3 } ] }, { "event": "Subsequently, the woman leans forward and blows out the candle. The room becomes darker. The woman lies down on her side, covering herself with a black blanket. Finally, she adjusts her position and closes her eyes to sleep.", "visual_elements": [ { "content": "The woman leans forward.", "type": "action", "weight": 2 }, { "content": "The woman blows out the candle.", "type": "action", "weight": 3 }, { "content": "The woman lies down on her side.", "type": "action", "weight": 3 }, { "content": "The woman covers herself with a black blanket.", "type": "action", "weight": 3 }, { "content": "The woman adjusts her position.", "type": "action", "weight": 2 }, { "content": "The woman closes her eyes to sleep.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0111", "video_path": "DREAM-1K/361.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 5.62, "resolution": { "width": 1920, "height": 1024 }, "caption": "At the start of the video, the camera focuses on a pair of hands holding a piece of paper. The background is dimly lit, with a warm, yellowish glow, and there are some items on the table. Then, the hands bring the paper forward, and the left hand pours a powder from it into a cup on the table.\nFinally, the camera shifts to show the front face of a woman. She looks down, with a focused expression.", "events": [ { "event": "At the start of the video, the camera focuses on a pair of hands holding a piece of paper. The background is dimly lit, with a warm, yellowish glow, and there are some items on the table. Then, the hands bring the paper forward, and the left hand pours a powder from it into a cup on the table.", "visual_elements": [ { "content": "The camera focuses on a pair of hands.", "type": "camera", "weight": 3 }, { "content": "The pair of hands are holding a piece of paper.", "type": "attribute", "weight": 3 }, { "content": "The background is dimly lit, with a warm, yellowish glow.", "type": "scene", "weight": 2 }, { "content": "There are some items on the table.", "type": "scene", "weight": 1 }, { "content": "The hands bring the paper forward and fold it in half.", "type": "action", "weight": 3 }, { "content": "The left hand pours the powder from the paper into a cup on the table.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera shifts to show the front face of a woman. 
She looks down, with a focused expression.", "visual_elements": [ { "content": "The camera shifts to show the front face of a woman.", "type": "camera", "weight": 3 }, { "content": "The woman looks down, with a focused expression.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0112", "video_path": "DREAM-1K/362.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 3.62, "resolution": { "width": 1920, "height": 800 }, "caption": "At the start of the video, the camera focuses on a woman wearing a blue uniform adorned with pink decorations. A name tag is pinned to her left chest, and a white towel drapes over her right shoulder. She is in an indoor setting with a floor made up of wooden panels in various colors, and a pot of green plants stands nearby. The camera shoots from above, capturing the woman as she walks from the left-rear of the frame towards the camera, reaching the center of the screen, continually gazing upwards where the lights flicker intermittently.\nThen, the camera cuts to the flickering light tube. Suddenly, the light tube bursts, and the light tube and glass shards scatter and fly everywhere.", "events": [ { "event": "At the start of the video, the camera focuses on a woman wearing a blue uniform adorned with pink decorations. A name tag is pinned to her left chest, and a white towel drapes over her right shoulder. She is in an indoor setting with a floor made up of wooden panels in various colors, and a pot of green plants stands nearby. The camera shoots from above, capturing the woman as she walks from the left-rear of the frame towards the camera, reaching the center of the screen, continually gazing upwards where the lights flicker intermittently.", "visual_elements": [ { "content": "The camera focuses on a woman wearing a blue uniform.", "type": "camera", "weight": 3 }, { "content": "The woman's uniform is adorned with pink decorations, and a name tag is pinned to her left chest.", "type": "attribute", "weight": 2 }, { "content": "A white towel is draped over the woman's right shoulder.", "type": "attribute", "weight": 2 }, { "content": "The woman is in an indoor setting.", "type": "attribute", "weight": 3 }, { "content": "The floor is made up of wooden panels in various colors, and there is a pot of green plants nearby.", "type": "scene", "weight": 1 }, { "content": "The camera shoots from above.", "type": "camera", "weight": 3 }, { "content": "The woman walks from the left-rear of the frame towards the camera, reaching the center of the screen.", "type": "action", "weight": 3 }, { "content": "The woman gazes upwards continually.", "type": "action", "weight": 3 }, { "content": "The lights above flicker intermittently.", "type": "scene", "weight": 3 } ] }, { "event": "Then, the camera cuts to the flickering light tube. 
Suddenly, the light tube bursts, and the light tube and glass shards scatter and fly everywhere.", "visual_elements": [ { "content": "The camera cuts to the flickering light tube.", "type": "camera", "weight": 3 }, { "content": "The light tube bursts.", "type": "action", "weight": 3 }, { "content": "The light tube and glass shards scatter and fly everywhere.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0113", "video_path": "DREAM-1K/363.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 6.5, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a man sitting behind a desk. He is wearing a white short-sleeved shirt with blue and black patterns on it. There are some tools and equipment on the desk, and in the background is a row of shelves filled with various electronic devices and tools. A hand of the man reaches in from the left side of the screen.\nThe hand passes a ring to the seated man and then withdraws from the left side of the screen. The seated man extends his left hand to receive the object, holding the ring carefully with both hands to examine it. Subsequently, he puts the ring in his mouth and bites down on it. Then, he takes the ring out of his mouth and looks at it carefully once again. Finally, he holds the ring with both hands and looks up towards the right-upper side.", "events": [ { "event": "At the beginning of the video, the camera focuses on a man sitting behind a desk. He is wearing a white short-sleeved shirt with blue and black patterns on it. There are some tools and equipment on the desk, and in the background is a row of shelves filled with various electronic devices and tools. A hand of the man reaches in from the left side of the screen.", "visual_elements": [ { "content": "The camera focuses on a man sitting behind a desk.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a white short-sleeved shirt with blue and black patterns on it.", "type": "attribute", "weight": 2 }, { "content": "On the table, there is a black lamp along with some other items, and in the background is a row of shelves, upon which various tools are placed.", "type": "scene", "weight": 1 }, { "content": "A hand of the man reaches in from the left side of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "The hand passes a ring to the seated man and then withdraws from the left side of the screen. The seated man extends his left hand to receive the object, holding the ring carefully with both hands to examine it. Subsequently, he puts the ring in his mouth and bites down on it. Then, he takes the ring out of his mouth and looks at it carefully once again. 
Finally, he holds the ring with both hands and looks up towards the right-upper side.", "visual_elements": [ { "content": "The hand passes a ring to the seated man.", "type": "action", "weight": 3 }, { "content": "The hand withdraws from the left side of the screen.", "type": "action", "weight": 3 }, { "content": "The seated man extends his left hand to receive the object.", "type": "action", "weight": 3 }, { "content": "The man holds the ring carefully with both hands to examine it.", "type": "action", "weight": 3 }, { "content": "The man puts the ring in his mouth and bites down on it.", "type": "action", "weight": 3 }, { "content": "The man takes the ring out of his mouth and looks at it carefully.", "type": "action", "weight": 3 }, { "content": "The man holds the ring with both hands.", "type": "action", "weight": 3 }, { "content": "The man looks up towards the right-upper side.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0114", "video_path": "DREAM-1K/364.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 10.5, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a young boy wearing a dark hoodie, crouching in a woodland. In the background, tall trees can be seen, with sunlight filtering through the leaves to create a dappled light and shadow effect. The camera shoots from below, showing his upper torso and part of his face. Subsequently, the boy picks up a coin with his right hand and looks down to examine it carefully.\nNext, the camera cuts to a close-up shot. The boy's fingers gently pinch the coin, scrutinizing it carefully. The coin bears intricate designs and patterns. Then, the boy slips the coin into his hand.\nAfterwards, the camera cuts back to the young boy. He stands up, his eyes scanning up and down the trunk of a tree in front of him.\nFinally, the camera cuts to a close-up of the tree trunk. Following that, a hand reaches out towards the trunk.", "events": [ { "event": "At the beginning of the video, the camera focuses on a young boy wearing a dark hoodie, crouching in a woodland. In the background, tall trees can be seen, with sunlight filtering through the leaves to create a dappled light and shadow effect. The camera shoots from below, showing his upper torso and part of his face. Subsequently, the boy picks up a coin with his right hand and looks down to examine it carefully.", "visual_elements": [ { "content": "The camera focuses on a young boy.", "type": "camera", "weight": 3 }, { "content": "The boy wears a dark hoodie.", "type": "attribute", "weight": 1 }, { "content": "The boy crouches in a woodland.", "type": "attribute", "weight": 3 }, { "content": "In the background, tall trees can be seen, with sunlight filtering through the leaves to create a dappled light and shadow effect.", "type": "scene", "weight": 1 }, { "content": "The camera shoots from below, showing his upper torso and part of his face.", "type": "camera", "weight": 3 }, { "content": "The boy picks up a coin with his right hand.", "type": "action", "weight": 3 }, { "content": "The boy looks down and examines it carefully.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to a close-up shot. The boy's fingers gently pinch the coin, scrutinizing it carefully. 
The coin bears intricate designs and patterns. Then, the boy slips the coin into his hand.", "visual_elements": [ { "content": "The camera cuts to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "The boy's fingers gently pinch the coin.", "type": "action", "weight": 3 }, { "content": "The coin bears intricate designs and patterns.", "type": "attribute", "weight": 2 }, { "content": "The boy slips the coin into his hand.", "type": "action", "weight": 3 } ] }, { "event": "Afterwards, the camera cuts back to the young boy. He stands up, his eyes scanning up and down the trunk of a tree in front of him.", "visual_elements": [ { "content": "The camera cuts back to the young boy.", "type": "camera", "weight": 3 }, { "content": "The boy stands up.", "type": "action", "weight": 3 }, { "content": "The boy scans up and down the trunk of a tree in front of him.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts to a close-up of the tree trunk. Following that, a hand reaches out towards the trunk.", "visual_elements": [ { "content": "The camera cuts to a close-up of the tree trunk.", "type": "camera", "weight": 3 }, { "content": "A hand reaches out towards the trunk.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 16 }, { "index": "TUNA_0115", "video_path": "DREAM-1K/365.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 5.5, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on two males. The foreground figure is a young boy wearing a light-colored T-shirt, holding a wooden stick in his right hand and pointing it towards the distance. The background figure is a taller boy wearing a light-colored shirt, with his hands in front of him rubbing an object, looking towards his left-front side. Both boys are standing in a woodland, surrounded by tall trees and dense foliage, with sunlight filtering through the leaves and casting spots on the ground. The boy in the foreground is pointing ahead and talking. The boy in the background looks in the direction indicated by the foreground boy.\nThe camera then cuts to the direction pointed by the foreground boy's finger, revealing a tree trunk with a prominent \"X\" mark carved on it. The camera slowly zooms in.", "events": [ { "event": "At the beginning of the video, the camera focuses on two males. The foreground figure is a young boy wearing a light-colored T-shirt, holding a wooden stick in his right hand and pointing it towards the distance. The background figure is a taller boy wearing a light-colored shirt, with his hands in front of him rubbing an object, looking towards his left-front side. Both boys are standing in a woodland, surrounded by tall trees and dense foliage, with sunlight filtering through the leaves and casting spots on the ground. The boy in the foreground is pointing ahead and talking. 
The boy in the background looks in the direction indicated by the foreground boy.", "visual_elements": [ { "content": "The camera focuses on two males.", "type": "camera", "weight": 3 }, { "content": "The foreground figure is a young boy wearing a light-colored T-shirt.", "type": "attribute", "weight": 2 }, { "content": "The boy holds a wooden stick in his right hand and points it towards the distance.", "type": "attribute", "weight": 3 }, { "content": "The background figure is a taller boy wearing a light-colored shirt.", "type": "attribute", "weight": 2 }, { "content": "The boy in the background has his hands in front of him, rubbing an object.", "type": "action", "weight": 2 }, { "content": "The boy in the background looks towards his left-front side.", "type": "attribute", "weight": 2 }, { "content": "The two boys are standing in a woodland.", "type": "attribute", "weight": 3 }, { "content": "They are surrounded by tall trees and dense foliage.", "type": "scene", "weight": 1 }, { "content": "The boy in the foreground points ahead and speaks.", "type": "action", "weight": 3 }, { "content": "The boy in the background looks in the direction indicated by the foreground boy.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to the direction pointed by the foreground boy's finger, revealing a tree trunk with a prominent \"X\" mark carved on it. The camera slowly zooms in.", "visual_elements": [ { "content": "The camera then cuts to the direction indicated by the foreground boy's finger.", "type": "camera", "weight": 3 }, { "content": "The scene shows a tree trunk with a prominent \"X\" mark carved on it.", "type": "attribute", "weight": 3 }, { "content": "The camera slowly zooms in.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0116", "video_path": "DREAM-1K/366.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 5.75, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a round dining table with several plates of food and cups of beverages arranged on it. Several people are seated around the table. Among them, a little girl wearing a pink dress is reaching into a bag of potato chips to grab some food. In the background, a Christmas tree decorated with colorful lights and some stacked cardboard boxes can be seen. A woman wearing a gray sweater quickly walks towards the girl from the right side of the screen, extending her right hand in the direction of the little girl. The woman's head bumps into a chandelier hanging above the dining table.\nNext, the camera cuts to the woman's full face. She opens her mouth and lets out a loud scream, while stepping backwards and reaching out her right hand to press against her forehead where she was hit.\nThe camera cuts to a girl with short hair, wearing a green sweater and earrings. She is chewing on something in her mouth. Subsequently, the camera cuts back to the woman in the gray sweater, who lowers her hand from her forehead and bends forward, extending her right hand to grab the bag of chips from the little girl.\nFinally, the camera cuts to the little girl. 
The woman in the gray sweater reaches out and grabs one end of the potato chip bag, while the little girl opens her mouth wide in a loud shout, pulling the other end of the bag with all her might.", "events": [ { "event": "At the beginning of the video, the camera focuses on a round dining table with several plates of food and cups of beverages arranged on it. Several people are seated around the table. Among them, a little girl wearing a pink dress is reaching into a bag of potato chips to grab some food. In the background, a Christmas tree decorated with colorful lights and some stacked cardboard boxes can be seen. A woman wearing a gray sweater quickly walks towards the girl from the right side of the screen, extending her right hand in the direction of the little girl. The woman's head bumps into a chandelier hanging above the dining table.", "visual_elements": [ { "content": "The camera focuses on a round dining table.", "type": "camera", "weight": 3 }, { "content": "There are several plates of food and cups of beverages arranged on the table.", "type": "attribute", "weight": 2 }, { "content": "Several people are seated around the table.", "type": "attribute", "weight": 3 }, { "content": "Among them, a little girl wears a pink dress.", "type": "attribute", "weight": 3 }, { "content": "The little girl is reaching into a bag of potato chips to grab some food.", "type": "action", "weight": 3 }, { "content": "In the background, a Christmas tree decorated with colorful lights and some stacked cardboard boxes can be seen.", "type": "scene", "weight": 1 }, { "content": "A woman wears a gray sweater.", "type": "attribute", "weight": 2 }, { "content": "The woman quickly walks towards the table from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The woman extends her right hand in the direction of the little girl.", "type": "action", "weight": 3 }, { "content": "The woman's head bumps into a chandelier hanging above the dining table.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera cuts to the woman's full face. She opens her mouth and lets out a loud scream, while stepping backwards and reaching out her right hand to press against her forehead where she was hit.", "visual_elements": [ { "content": "The camera cuts to the woman's full face.", "type": "camera", "weight": 3 }, { "content": "The woman opens her mouth and lets out a loud scream.", "type": "action", "weight": 3 }, { "content": "The woman steps backwards.", "type": "action", "weight": 3 }, { "content": "The woman reaches out her right hand to press against her forehead where she was hit.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a girl with short hair, wearing a green sweater and earrings. She is chewing on something in her mouth. 
Subsequently, the camera cuts back to the woman in the gray sweater, who lowers her hand from her forehead and bends forward, extending her right hand to grab the bag of chips from the little girl.", "visual_elements": [ { "content": "The camera cuts to a girl with short hair, wearing a green sweater and earrings.", "type": "camera", "weight": 3 }, { "content": "The girl with short hair is chewing on something in her mouth.", "type": "action", "weight": 3 }, { "content": "The camera cuts back to the woman in the gray sweater.", "type": "camera", "weight": 3 }, { "content": "The woman lowers her hand from her forehead.", "type": "action", "weight": 3 }, { "content": "The woman bends forward, extending her right hand to grab the bag of chips from the little girl.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts to the little girl. The woman in the gray sweater reaches out and grabs one end of the potato chip bag, while the little girl opens her mouth wide in a loud shout, pulling the other end of the bag with all her might.", "visual_elements": [ { "content": "The camera cuts to the little girl.", "type": "camera", "weight": 3 }, { "content": "The woman wearing a gray sweater reaches out and grabs one end of the potato chip bag.", "type": "action", "weight": 3 }, { "content": "The little girl opens her mouth wide in a loud shout.", "type": "action", "weight": 3 }, { "content": "The little girl pulls the other end of the bag with all her might.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 23 }, { "index": "TUNA_0117", "video_path": "DREAM-1K/367.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 7.88, "resolution": { "width": 1904, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a woman's hands. Her left hand supports a notebook, while her right hand holds a pencil, writing on sheet music paper.\nNext, the camera cuts to an outdoor scene. A woman dressed in a gray hoodie and black pants is running along a path by the lakeside. Her arms are slightly bent, swinging in front of her body in rhythm with her running. One side of the path is the lake, and the other side is dense shrubbery. The weather is gloomy, and the ground is wet and slippery.\nThe camera pans back, and the woman turns to her right, shifting from running in the middle of the path to running on the left side. A man riding a bicycle appears in the scene. He wears a green jacket and blue jeans, gloves on his hands, riding a bicycle forward. As the woman runs, she glances to her right and then looks ahead. The man cycling behind her gradually catches up to her and turns his head to speak to her.", "events": [ { "event": "At the beginning of the video, the camera focuses on a woman's hands. Her left hand supports a notebook, while her right hand holds a pencil, writing on sheet music paper.", "visual_elements": [ { "content": "The camera focuses on a woman's hands.", "type": "camera", "weight": 3 }, { "content": "The woman's left hand supports a notebook.", "type": "attribute", "weight": 2 }, { "content": "The woman's right hand holds a pencil, writing on sheet music paper.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to an outdoor scene. A woman dressed in a gray hoodie and black pants is running along a path by the lakeside. Her arms are slightly bent, swinging in front of her body in rhythm with her running. 
One side of the path is the lake, and the other side is dense shrubbery. The weather is gloomy, and the ground is wet and slippery.", "visual_elements": [ { "content": "The camera cuts to an outdoor scene.", "type": "camera", "weight": 3 }, { "content": "A woman is dressed in a gray hoodie and black pants.", "type": "attribute", "weight": 3 }, { "content": "The woman is running along a path by the lakeside.", "type": "action", "weight": 3 }, { "content": "The woman's arms are slightly bent.", "type": "action", "weight": 2 }, { "content": "The arms swing in front of her body in rhythm with her running.", "type": "action", "weight": 2 }, { "content": "One side of the path is the lake, and the other side is dense shrubbery.", "type": "scene", "weight": 1 }, { "content": "The weather is gloomy, and the ground is wet and slippery.", "type": "scene", "weight": 1 } ] }, { "event": "The camera pans back, and the woman turns to her right, shifting from running in the middle of the path to running on the left side. A man riding a bicycle appears in the scene. He wears a green jacket and blue jeans, gloves on his hands, riding a bicycle forward. As the woman runs, she glances to her right and then looks ahead. The man cycling behind her gradually catches up to her and turns his head to speak to her.", "visual_elements": [ { "content": "The camera pans back.", "type": "camera", "weight": 3 }, { "content": "The woman turns to her right, shifting from running in the middle of the path to running on the left side.", "type": "action", "weight": 3 }, { "content": "As the woman moves, a man riding a bicycle appears in the scene.", "type": "attribute", "weight": 3 }, { "content": "The man wears a green jacket and blue jeans, with gloves on his hands.", "type": "attribute", "weight": 2 }, { "content": "The man rides a bicycle forward.", "type": "action", "weight": 3 }, { "content": "As the woman runs, she glances to her right.", "type": "action", "weight": 2 }, { "content": "The woman looks ahead.", "type": "action", "weight": 1 }, { "content": "The man gradually catches up to the woman.", "type": "action", "weight": 3 }, { "content": "The man turns his head to speak to the woman.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0118", "video_path": "DREAM-1K/368.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 4.0, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a dark street with a wet, slippery surface reflecting dim light. A building can be seen in the background, with graffiti on its walls and several people seated on chairs near the doorway. In the center of the frame, a woman walks forward with her back to the camera.\nAs the camera advances, the woman grabs both sides of her coat and takes it off. Once the coat is removed, it's clear that she is wearing a police uniform, with the word \"POLICE\" visible on the back. Her black curly hair is tied up in a ponytail, hanging down behind her.", "events": [ { "event": "At the beginning of the video, the camera focuses on a dark street with a wet, slippery surface reflecting dim light. A building can be seen in the background, with graffiti on its walls and several people seated on chairs near the doorway. 
In the center of the frame, a woman walks forward with her back to the camera.", "visual_elements": [ { "content": "The camera focuses on a dark street.", "type": "camera", "weight": 3 }, { "content": "The street is wet and slippery, reflecting the dim light.", "type": "scene", "weight": 1 }, { "content": "A building can be seen in the background, with graffiti on its walls.", "type": "scene", "weight": 1 }, { "content": "There are several people sitting on chairs near the doorway.", "type": "scene", "weight": 2 }, { "content": "A woman walks forward with her back to the camera.", "type": "action", "weight": 3 } ] }, { "event": "As the camera advances, the woman grabs both sides of her coat and takes it off. Once the coat is removed, it's clear that she is wearing a police uniform, with the word \"POLICE\" visible on the back. Her black curly hair is tied up in a ponytail, hanging down behind her.", "visual_elements": [ { "content": "The camera advances.", "type": "camera", "weight": 3 }, { "content": "The woman grabs both sides of her coat.", "type": "action", "weight": 3 }, { "content": "The woman takes off her coat.", "type": "action", "weight": 3 }, { "content": "Once the coat is removed, it's clear that the woman is wearing a police uniform, with the word \"POLICE\" visible on the back.", "type": "attribute", "weight": 3 }, { "content": "Her black curly hair is tied up in a ponytail, hanging down behind her.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0119", "video_path": "DREAM-1K/370.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 8.25, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a woman sitting on a step. She wears a sleeveless dress with a floral pattern, a watch on her wrist, and a necklace around her neck. Her right hand holds a lit cigarette, and she props her chin up with her right hand while looking towards the right front.\nNext, the woman places a cigarette to her lips, takes a puff, and then removes it, exhaling a cloud of smoke. Subsequently, she turns to her right. With the cigarette held in her right hand, she opens one side of her woven handbag and reaches into it with her left hand, searching for something. Then, she pulls out a blue cell phone from the bag. Afterwards, she looks down at the phone, holding it in her left hand while her right hand's fingers swipe across the screen. The camera slightly shakes.", "events": [ { "event": "At the beginning of the video, the camera focuses on a woman sitting on a step. She wears a sleeveless dress with a floral pattern, a watch on her wrist, and a necklace around her neck. 
Her right hand holds a lit cigarette, and she props her chin up with her right hand while looking towards the right front.", "visual_elements": [ { "content": "The camera focuses on a woman sitting on a step.", "type": "camera", "weight": 3 }, { "content": "She wears a sleeveless dress with a floral pattern, a watch on her wrist, and a necklace around her neck.", "type": "attribute", "weight": 2 }, { "content": "Her right hand holds a lit cigarette.", "type": "attribute", "weight": 3 }, { "content": "She props her chin up with her right hand.", "type": "attribute", "weight": 3 }, { "content": "She looks towards the right front.", "type": "attribute", "weight": 3 } ] }, { "event": "Next, the woman places a cigarette to her lips, takes a puff, and then removes it, exhaling a cloud of smoke. Subsequently, she turns to her right. With the cigarette held in her right hand, she opens one side of her woven handbag and reaches into it with her left hand, searching for something. Then, she pulls out a blue cell phone from the bag. Afterwards, she looks down at the phone, holding it in her left hand while her right hand's fingers swipe across the screen. The camera slightly shakes.", "visual_elements": [ { "content": "The woman places a cigarette to her lips and takes a puff.", "type": "action", "weight": 3 }, { "content": "The woman removes the cigarette.", "type": "action", "weight": 2 }, { "content": "The woman exhales a cloud of smoke.", "type": "action", "weight": 2 }, { "content": "The woman turns to her right.", "type": "action", "weight": 3 }, { "content": "The woman holds the cigarette in her right hand.", "type": "action", "weight": 2 }, { "content": "The woman opens one side of her woven handbag and reaches into it with her left hand.", "type": "action", "weight": 2 }, { "content": "The woman searches for something.", "type": "action", "weight": 3 }, { "content": "The woman pulls out a blue cell phone from the bag.", "type": "action", "weight": 3 }, { "content": "The woman looks down at the phone.", "type": "action", "weight": 3 }, { "content": "The woman holds the phone in her left hand.", "type": "action", "weight": 3 }, { "content": "The woman's right hand fingers swipe across the screen.", "type": "action", "weight": 3 }, { "content": "The camera slightly shakes.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0120", "video_path": "DREAM-1K/371.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 8.25, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a woman wearing a red dress. She stands on a stage with a black curtain as the backdrop, adorned with several small purple lights. Her hair is brown and flows over her shoulders. She moves her limbs gracefully and fluidly as she dances on the stage.\nThe camera cuts, capturing her from behind. The camera pans to the left. The woman still dances on the stage. Gradually, the audience below the stage comes into view, seated at tables and watching her performance intently. A man wearing a helmet runs in from outside the door, looking at the stage and raising his hands.\nThe camera cuts to a man wearing a helmet. The man is dressed in a black leather jacket and wears a white helmet with red stripes. He raises his hands and looks ahead. With both hands, he takes off his helmet and excitedly speaks. 
The surrounding audience looks at the man with smiles.", "events": [ { "event": "At the beginning of the video, the camera focuses on a woman wearing a red dress. She stands on a stage with a black curtain as the backdrop, adorned with several small purple lights. Her hair is brown and flows over her shoulders. She moves her limbs gracefully and fluidly as she dances on the stage.", "visual_elements": [ { "content": "The camera focuses on a woman wearing a red dress.", "type": "camera", "weight": 3 }, { "content": "A woman stands on a stage.", "type": "attribute", "weight": 3 }, { "content": "The background is a black curtain, adorned with several small purple lights.", "type": "scene", "weight": 1 }, { "content": "The woman's hair is brown and flows over her shoulders.", "type": "attribute", "weight": 2 }, { "content": "The woman moves her limbs gracefully and fluidly as she dances on the stage.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts, capturing her from behind. The camera pans to the left. The woman still dances on the stage. Gradually, the audience below the stage comes into view, seated at tables and watching her performance intently. A man wearing a helmet runs in from outside the door, looking at the stage and raising his hands.", "visual_elements": [ { "content": "The camera cuts, capturing her from behind.", "type": "camera", "weight": 3 }, { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The woman still dances on the stage.", "type": "action", "weight": 2 }, { "content": "Gradually, the audience below the stage comes into view, seated at tables and watching her performance intently.", "type": "scene", "weight": 3 }, { "content": "A man wearing a helmet runs in from outside the door, looking at the stage and raising his hands.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to a man wearing a helmet. The man is dressed in a black leather jacket and wears a white helmet with red stripes. He raises his hands and looks ahead. With both hands, he takes off his helmet and excitedly speaks. The surrounding audience looks at the man with smiles.", "visual_elements": [ { "content": "The camera cuts to a man wearing a helmet.", "type": "camera", "weight": 3 }, { "content": "The man is dressed in a black leather jacket and wears a white helmet with red stripes.", "type": "attribute", "weight": 2 }, { "content": "The man raises his hands and looks ahead.", "type": "attribute", "weight": 3 }, { "content": "The man takes off his helmet with both hands.", "type": "action", "weight": 3 }, { "content": "The man speaks excitedly.", "type": "action", "weight": 3 }, { "content": "The surrounding audience looks at the man with smiles.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0121", "video_path": "DREAM-1K/372.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 5.75, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera shows the entrance of a stone castle, surrounded by many soldiers in armor and some civilians. The soldiers are mostly wearing chain mail and helmets, holding weapons, and some are mounted on horses. In the background, stone walls, wooden barrels, carts, and some piled-up debris are visible.\nThe camera gradually zooms in, revealing a man with his hands tied standing under a gibbet at the entrance of a castle. 
He wears gray, coarse clothing and looks very distressed. Soldiers surround him. Outside the castle gate, more cavalrymen are waiting. A rope is looped around the man's neck, and two soldiers next to him are pulling down on another rope together. Simultaneously, the man is hoisted up by the rope, his feet kicking in the air.\nThe camera cuts to a close-up view of a soldier approaching the man who is hanging. The man has a very painful expression and struggles in the air. The soldier in front of him raises his hands and then rips the man's clothes with both hands.", "events": [ { "event": "At the beginning of the video, the camera shows the entrance of a stone castle, surrounded by many soldiers in armor and some civilians. The soldiers are mostly wearing chain mail and helmets, holding weapons, and some are mounted on horses. In the background, stone walls, wooden barrels, carts, and some piled-up debris are visible.", "visual_elements": [ { "content": "At the beginning of the video, the camera shows the entrance of a stone castle.", "type": "camera", "weight": 3 }, { "content": "The stone castle is surrounded by many soldiers in armor and some civilians.", "type": "scene", "weight": 3 }, { "content": "The soldiers are mostly wearing chain mail and helmets, holding weapons, and some are mounted on horses.", "type": "attribute", "weight": 2 }, { "content": "In the background, stone walls, wooden barrels, carts, and some piled-up debris are visible.", "type": "scene", "weight": 1 } ] }, { "event": "The camera gradually zooms in, revealing a man with his hands tied standing under a gibbet at the entrance of a castle. He wears gray, coarse clothing and looks very distressed. Soldiers surround him. Outside the castle gate, more cavalrymen are waiting. A rope is looped around the man's neck, and two soldiers next to him are pulling down on another rope together. Simultaneously, the man is hoisted up by the rope, his feet kicking in the air.", "visual_elements": [ { "content": "The camera gradually zooms in.", "type": "camera", "weight": 3 }, { "content": "A man with his hands tied stands under a gibbet at the entrance of a castle.", "type": "attribute", "weight": 3 }, { "content": "The man wears gray, coarse clothing.", "type": "attribute", "weight": 2 }, { "content": "Soldiers surround the man.", "type": "scene", "weight": 3 }, { "content": "Outside the castle gate, more cavalrymen are waiting.", "type": "scene", "weight": 1 }, { "content": "A rope is looped around the man's neck.", "type": "attribute", "weight": 3 }, { "content": "Two soldiers next to him are pulling down on another rope together.", "type": "action", "weight": 3 }, { "content": "The man is hoisted up by the rope.", "type": "action", "weight": 3 }, { "content": "The man's feet kick in the air.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up view of a soldier approaching the man who is hanging. The man has a very painful expression and struggles in the air. 
The soldier in front of him raises his hands and then rips the man's clothes with both hands.", "visual_elements": [ { "content": "The camera cuts to a close-up view.", "type": "camera", "weight": 3 }, { "content": "The soldier approaches the man who is hanging.", "type": "action", "weight": 2 }, { "content": "The man has a very painful expression and struggles in the air.", "type": "action", "weight": 2 }, { "content": "The soldier in front of the man raises his hands.", "type": "action", "weight": 3 }, { "content": "The soldier rips the man's clothes with both hands.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0122", "video_path": "DREAM-1K/373.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 5.75, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the scene shows a group of people engaged in intense battle in shallow water. The background is a dense forest with lush trees. The combatants wear various medieval costumes, predominantly in shades of brown and green. Some wield long swords, while others brandish axes, causing water to splash everywhere and creating a chaotic scene.\nThe camera first focuses on a man wearing a brown striped coat. The man holds a long sword in his hand and raises it to strike his enemy in front of him. In the scene, one can see people falling, and blood ripples in the water. A man wearing a blue headscarf holds a weapon and charges towards the man in the brown striped coat. The man in the blue headscarf uses his weapon to tackle the man in the brown striped coat into the water.\nFinally, the camera cuts to a close-up view. The man wearing the blue headscarf holds the man in the brown striped coat underwater. The man in the brown striped coat struggles in the water, causing water to splash everywhere.", "events": [ { "event": "At the beginning of the video, the scene shows a group of people engaged in intense battle in shallow water. The background is a dense forest with lush trees. The combatants wear various medieval costumes, predominantly in shades of brown and green. Some wield long swords, while others brandish axes, causing water to splash everywhere and creating a chaotic scene.", "visual_elements": [ { "content": "The scene shows a group of people engaged in intense battle in shallow water.", "type": "scene", "weight": 3 }, { "content": "The background is a dense forest with lush trees.", "type": "scene", "weight": 1 }, { "content": "The combatants wear various medieval costumes, predominantly in shades of brown and green.", "type": "attribute", "weight": 2 }, { "content": "Some wield long swords, while others brandish axes.", "type": "attribute", "weight": 2 }, { "content": "The water splashes everywhere, and the scene is in chaos.", "type": "scene", "weight": 2 } ] }, { "event": "The camera first focuses on a man wearing a brown striped coat. The man holds a long sword in his hand and raises it to strike his enemy in front of him. In the scene, one can see people falling, and blood ripples in the water. A man wearing a blue headscarf holds a weapon and charges towards the man in the brown striped coat. 
The man in the blue headscarf uses his weapon to tackle the man in the brown striped coat into the water.", "visual_elements": [ { "content": "The camera first focuses on a man wearing a brown striped coat.", "type": "camera", "weight": 3 }, { "content": "The man holds a long sword in his hand.", "type": "attribute", "weight": 2 }, { "content": "The man raises his weapon to strike his enemy in front of him.", "type": "action", "weight": 3 }, { "content": "In the scene, one can see people falling, and blood ripples in the water.", "type": "scene", "weight": 2 }, { "content": "A man wearing a blue headscarf holds a weapon and charges towards the man in the brown striped coat.", "type": "action", "weight": 3 }, { "content": "The man in the blue headscarf uses his weapon to tackle the man in the brown striped coat into the water.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts to a close-up view. The man wearing the blue headscarf holds the man in the brown striped coat underwater. The man in the brown striped coat struggles in the water, causing water to splash everywhere.", "visual_elements": [ { "content": "The camera cuts to a close-up view.", "type": "camera", "weight": 3 }, { "content": "The man wearing the blue headscarf holds the man in the brown striped coat underwater.", "type": "action", "weight": 3 }, { "content": "The man in the brown striped coat struggles in the water.", "type": "action", "weight": 3 }, { "content": "The water splashes everywhere.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0123", "video_path": "DREAM-1K/374.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 11.5, "resolution": { "width": 1280, "height": 544 }, "caption": "At the beginning of the video, the camera focuses on a man wearing a light-colored T-shirt and a necklace. He raises both hands, with a set of keys gripped in his right hand, staring ahead. Subsequently, he moves his hands. The background is a room with white curtains hanging on the windows, and light filters in through the curtains.\nThe camera cuts to a woman wearing a black dress. She holds a blue umbrella in her right hand, with the tip of the umbrella pointing towards the man. Then, the woman turns her head to the right and looks down.\nThen the camera cuts to a wooden table, on which a black leather jacket and a black handgun are placed.\nThe camera returns to the woman, who points at the man with the tip of her umbrella, leaning to her right side and reaching for a handgun on the table with her right hand. The camera follows the woman's movements. She raises her right hand, holding the pistol aimed at the man. The woman looks at the man as she speaks. The man still keeps his hands raised.", "events": [ { "event": "At the beginning of the video, the camera focuses on a man wearing a light-colored T-shirt and a necklace. He raises both hands, with a set of keys gripped in his right hand, staring ahead. Subsequently, he moves his hands. 
The background is a room with white curtains hanging on the windows, and light filters in through the curtains.", "visual_elements": [ { "content": "The camera focuses on a man.", "type": "camera", "weight": 3 }, { "content": "The man wears a light-colored T-shirt and a necklace.", "type": "attribute", "weight": 2 }, { "content": "The man raises both hands.", "type": "attribute", "weight": 3 }, { "content": "The man grips a set of keys in his right hand.", "type": "attribute", "weight": 3 }, { "content": "The man stares ahead.", "type": "attribute", "weight": 3 }, { "content": "The man moves his hands.", "type": "action", "weight": 3 }, { "content": "The background is a room with white curtains hanging on the windows, and light filters in through the curtains.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts to a woman wearing a black dress. She holds a blue umbrella in her right hand, with the tip of the umbrella pointing towards the man. Then, the woman turns her head to the right and looks down.", "visual_elements": [ { "content": "The camera cuts to a woman.", "type": "camera", "weight": 3 }, { "content": "The woman wears a black dress.", "type": "attribute", "weight": 2 }, { "content": "The woman holds a blue umbrella in her right hand, with the tip of the umbrella pointing towards the man.", "type": "action", "weight": 3 }, { "content": "The woman turns her head to the right.", "type": "action", "weight": 3 }, { "content": "The woman looks down.", "type": "action", "weight": 3 } ] }, { "event": "Then the camera cuts to a wooden table, on which a black leather jacket and a black handgun are placed.", "visual_elements": [ { "content": "Then the camera cuts to a wooden table.", "type": "camera", "weight": 3 }, { "content": "A black leather jacket and a black handgun are placed on the table.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera returns to the woman, who points at the man with the tip of her umbrella, leaning to her right side and reaching for a handgun on the table with her right hand. The camera follows the woman's movements. She raises her right hand, holding the pistol aimed at the man. The woman looks at the man as she speaks. The man still keeps his hands raised.", "visual_elements": [ { "content": "The camera returns to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman points at the man with the tip of her umbrella.", "type": "action", "weight": 3 }, { "content": "The woman leans to her right side.", "type": "action", "weight": 3 }, { "content": "The woman reaches for a handgun on the table with her right hand.", "type": "action", "weight": 3 }, { "content": "The camera follows the woman's movements.", "type": "camera", "weight": 3 }, { "content": "The woman raises her right hand, holding the pistol aimed at the man.", "type": "action", "weight": 3 }, { "content": "The woman looks at the man as she speaks.", "type": "action", "weight": 3 }, { "content": "The man still keeps his hands raised.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 22 }, { "index": "TUNA_0124", "video_path": "DREAM-1K/375.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 9.12, "resolution": { "width": 1280, "height": 544 }, "caption": "At the beginning of the video, the camera focuses on a man wearing a black leather jacket, standing in a dimly lit room. He looks up at the light bulb and turns it on with his right hand. 
The room is illuminated by the light bulb.\nThen the camera cuts to a woman sitting on a white plastic chair. She wears black clothes, with her hands tied behind her back and her waist bound with a blue rope. The background of the room is a red brick wall, and next to it is a gray filing cabinet with a white towel placed on it. The woman pants heavily with her tongue sticking out, and she looks at the man.\nThe camera cuts again, showing the man walking forward with his back to the camera. Subsequently, the man turns to look at the woman.\nThen the camera cuts back to the woman, who is still tied to the chair. She pants heavily as she looks at the man and speaks.", "events": [ { "event": "At the beginning of the video, the camera focuses on a man wearing a black leather jacket, standing in a dimly lit room. He looks up at the light bulb and turns it on with his right hand. The room is illuminated by the light bulb.", "visual_elements": [ { "content": "The camera focuses on a man.", "type": "camera", "weight": 3 }, { "content": "The man wears a black leather jacket.", "type": "attribute", "weight": 2 }, { "content": "The man stands in a dimly lit room.", "type": "attribute", "weight": 3 }, { "content": "The man looks up at the light bulb.", "type": "action", "weight": 3 }, { "content": "The man turns on the light with his right hand.", "type": "action", "weight": 3 }, { "content": "The room is illuminated by the light bulb.", "type": "scene", "weight": 3 } ] }, { "event": "Then the camera cuts to a woman sitting on a white plastic chair. She wears black clothes, with her hands tied behind her back and her waist bound with a blue rope. The background of the room is a red brick wall, and next to it is a gray filing cabinet with a white towel placed on it. The woman pants heavily with her tongue sticking out, and she looks at the man.", "visual_elements": [ { "content": "Then the camera cuts to a woman sitting on a white plastic chair.", "type": "camera", "weight": 3 }, { "content": "The woman wears black clothes.", "type": "attribute", "weight": 2 }, { "content": "The woman's hands are tied behind her back, and her waist is bound with a blue rope.", "type": "attribute", "weight": 3 }, { "content": "The background of the room is a red brick wall, and next to it is a gray filing cabinet with a white towel placed on it.", "type": "scene", "weight": 1 }, { "content": "The woman pants heavily.", "type": "action", "weight": 3 }, { "content": "The woman's tongue sticks out.", "type": "action", "weight": 3 }, { "content": "The woman looks at the man.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again, showing the man walking forward with his back to the camera. Subsequently, the man turns to look at the woman.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "The man walks forward with his back to the camera.", "type": "action", "weight": 3 }, { "content": "As he reaches the doorway, the man turns to look at the woman.", "type": "action", "weight": 3 } ] }, { "event": "Then the camera cuts back to the woman, who is still tied to the chair. 
She pants heavily as she looks at the man and speaks.", "visual_elements": [ { "content": "The camera cuts back to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman looks at the man and speaks.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0125", "video_path": "DREAM-1K/376.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 5.5, "resolution": { "width": 1840, "height": 1024 }, "caption": "At the beginning of the video, the camera is focused on a man wearing a blue shirt standing in a room. The walls of the room are light blue, and there is a painting hanging on the wall.\nThe man walked towards the window of the room. The man walked to the bedside and sat there. The camera first moves to the right and then to the left and down as the man moves.", "events": [ { "event": "At the beginning of the video, the camera is focused on a man wearing a blue shirt standing in a room. The walls of the room are light blue, and there is a painting hanging on the wall.", "visual_elements": [ { "content": "The camera is focused on a man.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a blue shirt.", "type": "attribute", "weight": 2 }, { "content": "The man is standing in a room.", "type": "attribute", "weight": 2 }, { "content": "The walls of the room are light blue, and there is a painting hanging on them.", "type": "scene", "weight": 1 } ] }, { "event": "The man walked towards the window of the room. The man walked to the bedside and sat there. The camera first moves to the right and then to the left and down as the man moves.", "visual_elements": [ { "content": "The man walked towards the window of the room.", "type": "action", "weight": 3 }, { "content": "The man walked to the bedside and sat there.", "type": "action", "weight": 3 }, { "content": "The camera first moves to the right and then to the left and down as the man moves.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 7 }, { "index": "TUNA_0126", "video_path": "DREAM-1K/377.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 8.12, "resolution": { "width": 1920, "height": 800 }, "caption": "The camera is aimed at a woman wearing a pink plush jacket. The woman's hair is golden and draped over her shoulders. The woman frowned and walked forward. The camera slowly moves to the right, with a young man following behind the woman.\nThe camera shoots from behind the woman, following her movements. The woman's pace was a bit hurried, and her hair fluttered with her steps. The woman is in a relatively bright corridor. The woman walked forward and turned left, then opened a door. The woman walked in and closed the door, lying on the toilet. The door behind the woman bounced open.", "events": [ { "event": "The camera is aimed at a woman wearing a pink plush jacket. The woman's hair is golden and draped over her shoulders. The woman frowned and walked forward. 
The camera slowly moves to the right, with a young man following behind the woman.", "visual_elements": [ { "content": "The camera is aimed at a woman wearing a pink plush jacket.", "type": "camera", "weight": 3 }, { "content": "The woman's hair is golden and draped over her shoulders.", "type": "attribute", "weight": 2 }, { "content": "The woman frowned and walked forward.", "type": "action", "weight": 3 }, { "content": "The camera slowly moves to the right.", "type": "camera", "weight": 3 }, { "content": "A young man followed behind the woman.", "type": "action", "weight": 2 } ] }, { "event": "The camera shoots from behind the woman, following her movements. The woman's pace was a bit hurried, and her hair fluttered with her steps. The woman is in a relatively bright corridor. The woman walked forward and turned left, then opened a door. The woman walked in and closed the door, lying on the toilet. The door behind the woman bounced open.", "visual_elements": [ { "content": "The camera shoots from behind the woman, following her movements.", "type": "camera", "weight": 3 }, { "content": "The woman's pace was a bit hurried, and her hair fluttered with her steps.", "type": "action", "weight": 3 }, { "content": "The woman is in a relatively bright corridor.", "type": "camera", "weight": 3 }, { "content": "The woman walked forward and turned left.", "type": "action", "weight": 3 }, { "content": "The woman opened a door.", "type": "action", "weight": 3 }, { "content": "The woman walked in and closed the door.", "type": "action", "weight": 3 }, { "content": "The woman lies on the toilet.", "type": "action", "weight": 3 }, { "content": "The door behind the woman bounced open.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0127", "video_path": "DREAM-1K/378.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 3.62, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a stage with several performers. In the middle of the picture is a woman wearing a white long-sleeved shirt, a necklace around her neck, and a guitar in her hand. She stood in front of the microphone. Her hair is curly and coiled on top of her head. On the stage to the right of the woman, a man wearing black clothes is playing an electric guitar. There is also a pianist and keyboardist behind the stage. 
The background of the stage is relatively dim, but some lighting equipment and other instruments can be seen. The woman leaned forward slightly, approaching the microphone, and smiled as she straightened up and looked ahead. The woman placed her right hand on her chest.", "visual_elements": [ { "content": "The camera focuses on a stage.", "type": "camera", "weight": 3 }, { "content": "There are several performers on the stage.", "type": "scene", "weight": 3 }, { "content": "In the middle of the picture is a woman.", "type": "scene", "weight": 3 }, { "content": "The woman wears a white long-sleeved shirt and a necklace around her neck, and holds a guitar in her hand.", "type": "attribute", "weight": 2 }, { "content": "The woman stands in front of the microphone.", "type": "attribute", "weight": 3 }, { "content": "The woman's curly hair is coiled on top of her head.", "type": "attribute", "weight": 1 }, { "content": "On the stage to the right of the woman, a man is wearing black clothes.", "type": "attribute", "weight": 2 }, { "content": "The man wearing black clothes is playing an electric guitar.", "type": "action", "weight": 2 }, { "content": "There is also a pianist and keyboardist behind the stage.", "type": "scene", "weight": 2 }, { "content": "The background of the stage is relatively dim, but some lighting equipment and other instruments can be seen.", "type": "scene", "weight": 1 }, { "content": "The woman leaned forward slightly, close to the microphone.", "type": "action", "weight": 3 }, { "content": "The woman smiled and straightened up.", "type": "action", "weight": 3 }, { "content": "The woman looked ahead.", "type": "action", "weight": 2 }, { "content": "The woman placed her right hand on her chest.", "type": "action", "weight": 1 } ] }, { "event": "The camera cut to the audience seat, which is filled with people watching the stage, many applauding. The lights shone down from above the stage, illuminating some of the audience seats. The woman bowed to the audience.", "visual_elements": [ { "content": "The camera cut to the audience seat.", "type": "camera", "weight": 3 }, { "content": "The audience seat was filled with people.", "type": "scene", "weight": 3 }, { "content": "Everyone is watching the stage, and many people are applauding.", "type": "scene", "weight": 3 }, { "content": "The lights shone down from above the stage, illuminating some of the audience seats.", "type": "scene", "weight": 1 }, { "content": "The woman bows to the audience.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 19 }, { "index": "TUNA_0128", "video_path": "DREAM-1K/379.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 5.5, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera is aimed at a man sitting at a table. He was wearing a brown suit jacket, a white shirt, and a blue tie, with a transparent kettle and a glass in front of him. The background is a room with wooden walls, with a mirror hanging on the wall and a staircase leading upstairs next to it.\nThen, an elderly man wearing an orange jacket and blue jeans enters the scene from the right side. The older man walked up to the man sitting down and held the plate placed in front of him with his left hand. The man sitting turned his head slightly to look at the older man walking towards him.\nThe camera moves to the left and up. The older man walked behind the sitting man. 
Suddenly, the elderly man quickly turned around and struck the sitting man's head with a plate, causing the plate to shatter and fragments to scatter. After being hit, the man sitting tilted his body to his left, lowered his head, and left the stool with his hands covering his head.", "events": [ { "event": "At the beginning of the video, the camera is aimed at a man sitting at a table. He was wearing a brown suit jacket, a white shirt, and a blue tie, with a transparent kettle and a glass in front of him. The background is a room with wooden walls, with a mirror hanging on the wall and a staircase leading upstairs next to it.", "visual_elements": [ { "content": "The camera is aimed at a man sitting at a table.", "type": "camera", "weight": 3 }, { "content": "The man sitting is wearing a brown suit jacket, a white shirt, and a blue tie.", "type": "attribute", "weight": 2 }, { "content": "There is a transparent kettle and a glass cup on the table.", "type": "scene", "weight": 1 }, { "content": "The background is a room with wooden walls, with a mirror hanging on the wall and a staircase leading upstairs next to it.", "type": "scene", "weight": 1 } ] }, { "event": "Then, an elderly man wearing an orange jacket and blue jeans enters the scene from the right side. The older man walked up to the man sitting down and held the plate placed in front of him with his left hand. The man sitting turned his head slightly to look at the older man walking towards him.", "visual_elements": [ { "content": "An elderly man wearing an orange jacket and blue jeans enters the scene from the right side.", "type": "action", "weight": 3 }, { "content": "The older man walked up to the sitting man.", "type": "action", "weight": 3 }, { "content": "The elderly man held the plate in front of the sitting man with his left hand.", "type": "action", "weight": 3 }, { "content": "The man sitting turned his head slightly and looked at the older man walking towards him.", "type": "action", "weight": 3 } ] }, { "event": "The camera moves to the left and up. The older man walked behind the sitting man. Suddenly, the elderly man quickly turned around and struck the sitting man's head with a plate, causing the plate to shatter and fragments to scatter. 
After being hit, the man sitting tilted his body to his left, lowered his head, and left the stool with his hands covering his head.", "visual_elements": [ { "content": "The camera moves left and up.", "type": "camera", "weight": 3 }, { "content": "The older man walked behind the sitting man.", "type": "action", "weight": 3 }, { "content": "The elderly man quickly turned around.", "type": "action", "weight": 3 }, { "content": "The older man slammed a plate on the head of the sitting man.", "type": "action", "weight": 3 }, { "content": "The plate shattered and the fragments scattered in all directions.", "type": "scene", "weight": 2 }, { "content": "After the man sitting was hit, his body tilted to his left.", "type": "action", "weight": 3 }, { "content": "The sitting man left the stool with his hands covering his head.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0129", "video_path": "DREAM-1K/380.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 7.0, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera looks down from above and shows a middle-aged man wearing a gray suit standing in front of an old-fashioned toilet water tank. The water tank is white with a slightly aged surface. The walls are covered with white tiles, and the floor is made of black and white checkered tiles. The man stood in front of the water tank, his hands reaching towards the top of the tank. The man opened the top lid of the water tank with his right hand and took out a transparent glass bottle from the tank with his left hand. The transparent glass bottle contains liquid.", "events": [ { "event": "At the beginning of the video, the camera looks down from above and shows a middle-aged man wearing a gray suit standing in front of an old-fashioned toilet water tank. The water tank is white with a slightly aged surface. The walls are covered with white tiles, and the floor is made of black and white checkered tiles. The man stood in front of the water tank, his hands reaching towards the top of the tank. The man opened the top lid of the water tank with his right hand and took out a transparent glass bottle from the tank with his left hand. The transparent glass bottle contains liquid.", "visual_elements": [ { "content": "The camera shoots from above, looking down.", "type": "camera", "weight": 3 }, { "content": "A middle-aged man wearing a gray suit stands in front of an old-fashioned toilet water tank.", "type": "attribute", "weight": 2 }, { "content": "The water tank is white with a slightly aged surface. 
The walls are covered with white tiles, and the floor is made of black and white checkered tiles.", "type": "scene", "weight": 2 }, { "content": "The man stands in front of the water tank.", "type": "action", "weight": 3 }, { "content": "The man reached for the top of the water tank with both hands.", "type": "action", "weight": 3 }, { "content": "The man opened the lid on the top of the water tank with his right hand.", "type": "action", "weight": 3 }, { "content": "The man took out a transparent glass bottle from the water tank with his left hand.", "type": "action", "weight": 3 }, { "content": "The transparent glass bottle contains liquid.", "type": "attribute", "weight": 1 } ] } ], "n_events": 1, "n_elements": 8 }, { "index": "TUNA_0130", "video_path": "DREAM-1K/381.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 3.5, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a circular table with some stationery and books placed on it. There are two children sitting around the table, a girl wearing a pink short-sleeved shirt and a boy wearing a blue hoodie. Two people are doing homework; the girl holds a pen in her right hand and the boy holds a pen in his left hand. There is a colored glass bowl, a pen holder containing pencils and pens, and some other small objects on the table. In the background, you can see a bookshelf with several books and some decorations placed on it. The girl looked up towards the front.\nThe camera cuts to a close-up shot of an adult woman holding a phone to her ear with her right hand, making a call. She is wearing a light-colored turtleneck sweater and her hair is light-colored. The camera focuses on her face and hands, the background is blurry, and it can be vaguely seen that the previous boy is still at the table. Subsequently, the woman turned her head towards the back.", "events": [ { "event": "At the beginning of the video, the camera focuses on a circular table with some stationery and books placed on it. There are two children sitting around the table, a girl wearing a pink short-sleeved shirt and a boy wearing a blue hoodie. Two people are doing homework; the girl holds a pen in her right hand and the boy holds a pen in his left hand. There is a colored glass bowl, a pen holder containing pencils and pens, and some other small objects on the table. In the background, you can see a bookshelf with several books and some decorations placed on it. 
The girl looked up towards the front.", "visual_elements": [ { "content": "The camera is aimed at a circular table.", "type": "camera", "weight": 3 }, { "content": "There are some stationery and books placed on the table.", "type": "scene", "weight": 2 }, { "content": "There are two children sitting around the table.", "type": "scene", "weight": 3 }, { "content": "A girl on the left side of the screen is wearing a pink short-sleeved shirt.", "type": "attribute", "weight": 2 }, { "content": "A boy on the right is wearing a blue hoodie.", "type": "attribute", "weight": 2 }, { "content": "Two people are doing homework.", "type": "action", "weight": 3 }, { "content": "The girl holds a pen in her right hand, while the boy holds a pen in his left hand.", "type": "attribute", "weight": 2 }, { "content": "There is a colored glass bowl, a pen holder containing pencils and pens, and some other small objects on the table.", "type": "scene", "weight": 1 }, { "content": "In the background, you can see a bookshelf with several books and some decorations placed on it.", "type": "scene", "weight": 1 }, { "content": "The girl lifted her eyes and looked ahead.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up shot of an adult woman holding a phone to her ear with her right hand, making a call. She is wearing a light-colored turtleneck sweater and her hair is light-colored. The camera focuses on her face and hands, the background is blurry, and it can be vaguely seen that the previous boy is still at the table. Subsequently, the woman turned her head towards the back.", "visual_elements": [ { "content": "The camera cuts to a close-up shot of an adult female.", "type": "camera", "weight": 3 }, { "content": "A woman holds a phone in her right hand and puts it to her ear to make a phone call.", "type": "action", "weight": 3 }, { "content": "The woman is wearing a light-colored turtleneck sweater and has light-colored hair.", "type": "attribute", "weight": 2 }, { "content": "The camera focuses on her face and hands.", "type": "camera", "weight": 3 }, { "content": "The background is blurry, and you can vaguely see that the previous boy is still at the table.", "type": "scene", "weight": 1 }, { "content": "The woman turned her head back.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0131", "video_path": "DREAM-1K/382.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 11.5, "resolution": { "width": 1840, "height": 1024 }, "caption": "At the beginning of the video, the camera focuses on the hand of a man wearing a black suit. The man's cuffs revealed the sleeves of his white shirt. In the background, several rows of wooden long tables can be seen, with some scattered notebooks placed on them. The man walked to the table on the right side of the screen, holding several books in his left hand and putting the books on the table into his left hand with his right hand. The camera moves with his movements.\nThe camera cuts to a broader perspective, displaying the layout of the entire room. There are several large windows in the room, with light-colored curtains hanging on them. Sunlight pours into the room through the curtains, and there are several paintings and a clock hanging on the wall. The man walked forward with his back to the camera. The man walked forward while picking up the notebook on the table with his right hand and placing it in his left hand.\nThe camera zooms in. 
The man took the notebook in his hand, sighed, and raised his right hand to put down a stack of notebooks.", "events": [ { "event": "At the beginning of the video, the camera focuses on the hand of a man wearing a black suit. The man's cuffs revealed the sleeves of his white shirt. In the background, several rows of wooden long tables can be seen, with some scattered notebooks placed on them. The man walked to the table on the right side of the screen, holding several books in his left hand and putting the books on the table into his left hand with his right hand. The camera moves with his movements.", "visual_elements": [ { "content": "The camera focuses on the hand of a man wearing a black suit.", "type": "camera", "weight": 3 }, { "content": "The man's cuffs reveal the sleeves of his white shirt.", "type": "attribute", "weight": 1 }, { "content": "In the background, several rows of wooden long tables can be seen, with some scattered notebooks placed on them.", "type": "scene", "weight": 2 }, { "content": "The man walked to the table on the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The man's left hand is holding several books.", "type": "attribute", "weight": 2 }, { "content": "The man put the notebook on the table into his left hand with his right hand.", "type": "action", "weight": 3 }, { "content": "The camera pans with his movements.", "type": "scene", "weight": 3 } ] }, { "event": "The camera cuts to a broader perspective, displaying the layout of the entire room. There are several large windows in the room, with light-colored curtains hanging on them. Sunlight pours into the room through the curtains, and there are several paintings and a clock hanging on the wall. The man walked forward with his back to the camera. The man walked forward while picking up the notebook on the table with his right hand and placing it in his left hand.", "visual_elements": [ { "content": "The camera cuts to a broader perspective, displaying the layout of the entire room.", "type": "camera", "weight": 3 }, { "content": "There are several large windows in the room, with light-colored curtains hanging on them. Sunlight pours into the room through the curtains, and there are several paintings and a clock hanging on the wall.", "type": "scene", "weight": 2 }, { "content": "The man walked forward with his back to the camera.", "type": "action", "weight": 3 }, { "content": "The man walked forward while picking up the notebook on the table with his right hand and placing it in his left hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera zooms in. The man took the notebook in his hand, sighed, and raised his right hand to put down a stack of notebooks.", "visual_elements": [ { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The man took the notebook in his hand.", "type": "action", "weight": 2 }, { "content": "The man sighed.", "type": "action", "weight": 2 }, { "content": "The man raised his right hand and prepared to put down a stack of notebooks.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0132", "video_path": "DREAM-1K/383.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 8.75, "resolution": { "width": 1840, "height": 1024 }, "caption": "The camera focuses on a dense forest, with sunlight shining on tree trunks and leaves, creating mottled light and shadow. 
Several young people wearing clothes of different colors ran past the camera one by one from the right side of the screen.\nCut to a scene of a river. A young boy is sitting on a wooden boat, wearing a striped shirt, dark pants, and a gray hat. The boy is rowing the boat with an oar, and he turns his head to look around. The boat slowly moved forward on the water surface.\nThe camera cuts again, and a girl wearing a long white dress kneels on a raft. The trees and river water around shimmered in the sunlight. The girl's hair was draped over her shoulders, and she supported the raft with her hands, leaning forward slightly. The raft floated in the water, and the girl turned her head to look back. Finally, the raft bumped against a stone in the water, and the girl was startled and leaned forward.", "events": [ { "event": "The camera focuses on a dense forest, with sunlight shining on tree trunks and leaves, creating mottled light and shadow. Several young people wearing clothes of different colors ran past the camera one by one from the right side of the screen.", "visual_elements": [ { "content": "The camera focuses on a dense forest.", "type": "camera", "weight": 3 }, { "content": "The sunlight shines on the tree trunks and leaves, creating mottled light and shadow.", "type": "scene", "weight": 2 }, { "content": "Several young people wearing clothes of different colors ran past the camera one by one from the right side of the screen.", "type": "scene", "weight": 3 } ] }, { "event": "Cut to a scene of a river. A young boy is sitting on a wooden boat, wearing a striped shirt, dark pants, and a gray hat. The boy is rowing the boat with an oar, and he turns his head to look around. The boat slowly moved forward on the water surface.", "visual_elements": [ { "content": "Cut to a scene of a river.", "type": "camera", "weight": 3 }, { "content": "A young boy is sitting on a wooden boat.", "type": "attribute", "weight": 3 }, { "content": "The boy is wearing a striped shirt and dark pants, with a gray hat on his head.", "type": "action", "weight": 3 }, { "content": "The boy is rowing the boat with an oar.", "type": "action", "weight": 3 }, { "content": "The boy turned his head and looked around.", "type": "action", "weight": 3 }, { "content": "The boat slowly moved forward on the water surface.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts again, and a girl wearing a long white dress kneels on a raft. The trees and river water around shimmered in the sunlight. The girl's hair was draped over her shoulders, and she supported the raft with her hands, leaning forward slightly. The raft floated in the water, and the girl turned her head to look back. 
Finally, the raft bumped against a stone in the water, and the girl was startled and leaned forward.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "A girl is wearing a long white dress.", "type": "attribute", "weight": 2 }, { "content": "The girl knelt on a raft.", "type": "attribute", "weight": 3 }, { "content": "The trees and river water around shimmered in the sunlight.", "type": "scene", "weight": 1 }, { "content": "The girl's hair was draped over her shoulders.", "type": "attribute", "weight": 1 }, { "content": "The girl supported the raft with her hands and leaned forward slightly.", "type": "attribute", "weight": 3 }, { "content": "The raft floated in the current.", "type": "scene", "weight": 1 }, { "content": "The girl turned her head and looked back.", "type": "action", "weight": 3 }, { "content": "The raft bumped against a stone in the water.", "type": "scene", "weight": 2 }, { "content": "The girl was startled and leaned forward.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0133", "video_path": "DREAM-1K/384.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 9.0, "resolution": { "width": 1840, "height": 1024 }, "caption": "At the beginning of the video, the camera is aimed at a woman wearing white pajamas, who is sitting on the bed, leaning against the headboard. The background is a patterned wall, and a lit kerosene lamp is placed on the bedside table, gently illuminating the surrounding environment. She is holding a book in her hand.\nThe camera pans slightly to the right, and the woman flips through the book in her hand under the light of the kerosene lamp. Then, the camera moves slightly downwards. She stopped reading and turned her head to look at the box on the bedside table. 
She reaches out her right hand, opens the box on the table, and searches inside for something.", "visual_elements": [ { "content": "The camera pans slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The woman's body moves towards the bedside table.", "type": "action", "weight": 3 }, { "content": "The woman looks at the book in her hand under the light of the kerosene lamp.", "type": "action", "weight": 3 }, { "content": "The camera pans slightly downwards.", "type": "camera", "weight": 3 }, { "content": "The woman stops reading.", "type": "action", "weight": 2 }, { "content": "The woman turns her head to look at the box on the bedside table.", "type": "action", "weight": 3 }, { "content": "The woman extends her right hand, opens the box, and searches inside for something.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0134", "video_path": "DREAM-1K/385.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 13.38, "resolution": { "width": 1840, "height": 1024 }, "caption": "At the beginning of the video, the camera focuses on a metal container with some slight wear and scratches on its surface. A person holds the container with both hands and continuously shakes the handle on the container with their left hand. White particles keep falling from the container.\nNext, the camera cuts. A young woman holds a blue glass bottle in her right hand, then tilts the bottle, and the liquid inside drips down. Then, the woman puts the bottle away.\nThe camera cuts back to the woman's profile. The woman lowers her head and looks down. The camera switches again, displaying a metal circular baking tray filled with some liquid batter. The batter flows into the baking tray from above, forming a uniform circle.\nFinally, the camera cuts back to the woman. The woman works with her head down, focused. She lifts a golden basin with both hands and tilts it toward herself, the upright basin covering her face.", "events": [ { "event": "At the beginning of the video, the camera focuses on a metal container with some slight wear and scratches on its surface. A person holds the container with both hands and continuously shakes the handle on the container with their left hand. White particles keep falling from the container.", "visual_elements": [ { "content": "The camera focuses on a metal container.", "type": "camera", "weight": 3 }, { "content": "There is some slight wear and scratches on the surface of the container.", "type": "attribute", "weight": 1 }, { "content": "A person holds the container with both hands.", "type": "attribute", "weight": 3 }, { "content": "The person grasps the handle on the container with their left hand and shakes it continuously.", "type": "action", "weight": 3 }, { "content": "There are white particles continuously falling from the container.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera cuts. A young woman holds a blue glass bottle in her right hand, then tilts the bottle, and the liquid inside drips down. 
Then, the woman puts the bottle away.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A young woman holds a blue glass bottle in her right hand.", "type": "attribute", "weight": 3 }, { "content": "The woman tilts the glass bottle, and the liquid inside drips down.", "type": "action", "weight": 3 }, { "content": "The woman puts the bottle away.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts back to the woman's profile. The woman lowers her head and looks down. The camera switches again, displaying a metal circular baking tray filled with some liquid batter. The batter flows into the baking tray from above, forming a uniform circle.", "visual_elements": [ { "content": "The camera cuts to the woman's profile again.", "type": "camera", "weight": 3 }, { "content": "The woman lowers her head and looks down.", "type": "action", "weight": 3 }, { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "A metal circular baking tray is filled with some liquid batter.", "type": "attribute", "weight": 2 }, { "content": "The batter flows into the baking tray from above, forming a uniform circle.", "type": "attribute", "weight": 3 } ] }, { "event": "Finally, the camera cuts back to the woman. The woman works with her head down, focused. She lifts a golden basin with both hands and tilts it toward herself, the upright basin covering her face.", "visual_elements": [ { "content": "The camera cuts to the woman again.", "type": "camera", "weight": 3 }, { "content": "The woman works with her head down, focused.", "type": "action", "weight": 2 }, { "content": "The woman lifts a golden basin with both hands and tilts it toward herself.", "type": "action", "weight": 3 }, { "content": "The upright basin covers the woman's face.", "type": "attribute", "weight": 3 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0135", "video_path": "DREAM-1K/386.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 7.5, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an open field with a dark background and some reflections on the ground. There is a strong light source in the center of the screen, and there is smoke surrounding the light source. At the bottom of the screen, there is text displaying 'SPACEX | Reusable Rocket Tests'. A rocket shaped object fell into the center of the field, and the tail of the rocket sprayed strong flames and smoke. When the rocket hit the ground, a violent explosion occurred, and the scene was instantly covered by strong white light, followed by a huge fireball and scattered fragments.\nThe screen cuts to another scene with a daytime background, and the sea around the platform is clearly visible. A rocket is vertically parked on an open platform, tilting on the platform and gradually tilting to one side, eventually falling onto the platform, and smoke is dispersed. The rocket exploded when it fell and made contact with the ground. Flames and fragments scattered everywhere, and the scene was covered by strong light.", "events": [ { "event": "At the beginning of the video, the camera focuses on an open field with a dark background and some reflections on the ground. There is a strong light source in the center of the screen, and there is smoke surrounding the light source. At the bottom of the screen, there is text displaying 'SPACEX | Reusable Rocket Tests'. 
A rocket shaped object fell into the center of the field, and the tail of the rocket sprayed strong flames and smoke. When the rocket hit the ground, a violent explosion occurred, and the scene was instantly covered by strong white light, followed by a huge fireball and scattered fragments.", "visual_elements": [ { "content": "The camera focuses an open field", "type": "camera", "weight": 3 }, { "content": "The background is dark night, with some reflections on the ground.", "type": "scene", "weight": 1 }, { "content": "There is a strong light source in the center of the screen, and there is smoke surrounding the light source.", "type": "scene", "weight": 2 }, { "content": "At the bottom of the screen, there is text displaying \"SPACEX | Reusable Rocket Tests\".", "type": "scene", "weight": 1 }, { "content": "A rocket shaped object fell in the center of the field.", "type": "action", "weight": 3 }, { "content": "The rocket's tail spewed out intense flames and smoke.", "type": "attribute", "weight": 3 }, { "content": "When the rocket hit the ground, a violent explosion occurred.", "type": "action", "weight": 3 }, { "content": "The screen is instantly covered by strong white light.", "type": "attribute", "weight": 2 }, { "content": "Subsequently, a scene of huge fireballs and scattered fragments appeared.", "type": "attribute", "weight": 2 } ] }, { "event": "The screen cuts to another scene with a daytime background, and the sea around the platform is clearly visible. A rocket is vertically parked on an open platform, tilting on the platform and gradually tilting to one side, eventually falling onto the platform, and smoke is dispersed. The rocket exploded when it fell and made contact with the ground. Flames and fragments scattered everywhere, and the scene was covered by strong light.", "visual_elements": [ { "content": "The screen cuts to another scene", "type": "camera", "weight": 3 }, { "content": "The background has turned into daytime, and the sea around the platform is clearly visible.", "type": "scene", "weight": 2 }, { "content": "A rocket is vertically parked on an open platform.", "type": "attribute", "weight": 3 }, { "content": "The rocket tilts on the platform, gradually tilting to one side, and finally falls onto the platform, dispersing smoke in all directions.", "type": "action", "weight": 3 }, { "content": "The rocket exploded when it fell and made contact with the ground.", "type": "action", "weight": 3 }, { "content": "Flames and fragments scattered everywhere, and the scene was covered by strong light.", "type": "attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0136", "video_path": "DREAM-1K/387.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 4.25, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a faucet. A man wearing dark long sleeved clothes holds a glass in his right hand under the faucet, and holds the handle of the faucet with his left hand, slowly rotating it open. The water flows out of the rusty faucet and into a transparent glass. In the background, you can see an old-fashioned metal faucet and a small basket made of metal wire hanging below the faucet.\nNext, the camera switches to the kitchen scene. A person wearing a white apron holds the handle wrapped in a towel with their left hand and is frying steak with a spatula in their right hand. 
Some kitchen utensils and cookware can be seen in the background.", "events": [ { "event": "At the beginning of the video, the camera focuses on a faucet. A man wearing dark long-sleeved clothes holds a glass in his right hand under the faucet, and holds the handle of the faucet with his left hand, slowly rotating it open. The water flows out of the rusty faucet and into a transparent glass. In the background, you can see an old-fashioned metal faucet and a small basket made of metal wire hanging below the faucet.", "visual_elements": [ { "content": "The camera focuses on a faucet.", "type": "camera", "weight": 3 }, { "content": "A man wearing dark long-sleeved clothes holds a glass under the faucet in his right hand.", "type": "action", "weight": 3 }, { "content": "The man holds the handle of the faucet with his left hand.", "type": "action", "weight": 3 }, { "content": "The man slowly rotates it open.", "type": "action", "weight": 3 }, { "content": "The water flows out of the rusty faucet and into the glass.", "type": "scene", "weight": 3 }, { "content": "In the background, you can see another metal faucet and a small basket made of metal wire hanging below the faucet.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera switches to the kitchen scene. A person wearing a white apron holds the handle wrapped in a towel with their left hand and is frying steak with a spatula in their right hand. Some kitchen utensils and cookware can be seen in the background.", "visual_elements": [ { "content": "The camera cuts to the kitchen scene.", "type": "camera", "weight": 3 }, { "content": "A person wearing a white apron holds the handle wrapped in a towel with their left hand.", "type": "attribute", "weight": 3 }, { "content": "A person wearing a white apron is frying steak with a spatula in their right hand.", "type": "action", "weight": 3 }, { "content": "Some kitchen utensils and cookware can be seen in the background.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0137", "video_path": "DREAM-1K/388.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 5.25, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on the face of a man who is lying naked, with a painful expression, his body bound with tape. The camera captures a side view, showing his head and upper torso. The background is blurry.\nNext, the camera pans slightly downwards. One person tightly covers the man's face with a towel, while another person takes a showerhead and sprays water onto the man's face. The man twists his body in rebellion.", "events": [ { "event": "At the beginning of the video, the camera focuses on the face of a man who is lying naked, with a painful expression, his body bound with tape. The camera captures a side view, showing his head and upper torso. The background is blurry.", "visual_elements": [ { "content": "The camera focuses on a man's face.", "type": "camera", "weight": 3 }, { "content": "The man is lying naked, with a painful expression on his face, and his body is bound with tape.", "type": "attribute", "weight": 3 }, { "content": "The camera captures a side view, showing his head and upper torso.", "type": "camera", "weight": 3 }, { "content": "The background is blurry.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera pans slightly downwards. 
One person tightly covers the man's face with a towel, while another person takes a showerhead and sprays water onto the man's face. The man twists his body in rebellion.", "visual_elements": [ { "content": "The camera moves slightly downwards.", "type": "camera", "weight": 3 }, { "content": "A person's hands tightly cover the man's face with a towel.", "type": "action", "weight": 3 }, { "content": "Another person takes a showerhead and sprays water onto the man's face.", "type": "action", "weight": 3 }, { "content": "The man twists his body in rebellion.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0138", "video_path": "DREAM-1K/389.mp4", "video_source": "DREAM-1K", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Film", "duration": 5.5, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a cardboard box with the words \"WELCOME TO THE TEAM\" written on the top. A person's hand appears in the picture, gently lifting the lid of the box with fingers. There is a hand grenade inside the box, with some dust and signs of wear on the surface of the grenade.\nThe camera cuts to a male soldier wearing camouflage clothing, with his upper body appearing in the frame. There is a label of \"BRIGGS\" on the soldier's clothes. He leans slightly forward, observing the inside of the box.\nThe camera cuts back to the grenade inside the box again.", "events": [ { "event": "At the beginning of the video, the camera focuses on a cardboard box with the words \"WELCOME TO THE TEAM\" written on the top. A person's hand appears in the picture, gently lifting the lid of the box with fingers. There is a hand grenade inside the box, with some dust and signs of wear on the surface of the grenade.", "visual_elements": [ { "content": "The camera focuses on a cardboard box.", "type": "camera", "weight": 3 }, { "content": "The top of the box is inscribed with the words \"WELCOME TO THE TEAM\".", "type": "attribute", "weight": 3 }, { "content": "A person's hand appears in the picture.", "type": "scene", "weight": 3 }, { "content": "The fingers gently lift the lid of the box.", "type": "action", "weight": 3 }, { "content": "There is a grenade inside the box.", "type": "attribute", "weight": 3 }, { "content": "There are some traces of dust and wear on the surface of the grenade.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a male soldier wearing camouflage clothing, with his upper body appearing in the frame. There is a label of \"BRIGGS\" on the soldier's clothes. 
He leans slightly forward, observing the inside of the box.", "visual_elements": [ { "content": "The camera cuts to a male soldier wearing camouflage clothing, with his upper body appearing in the frame.", "type": "camera", "weight": 3 }, { "content": "There is a label of \"BRIGGS\" on the clothes.", "type": "attribute", "weight": 1 }, { "content": "The soldier leans slightly forward, observing the inside of the box.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the grenade inside the box again.", "visual_elements": [ { "content": "The camera cuts back to the grenade inside the box again.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 10 }, { "index": "TUNA_0139", "video_path": "DREAM-1K/390.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 5.75, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera is aimed at two characters. The person on the left is a woman wearing a light-colored jacket and dark underwear, while the person on the right is a man wearing a white vest with his back facing the camera. The background is a forest with sparse trees.\nThe woman bends down and approaches the man. Her hair is curly and she wears golden earrings on her ears. The man turns to look at the woman, with his longer hair tied in a small braid. The woman stands up straight, holding a bottle of wine in her hand. Next, the camera moves downwards to the left. The woman raises the bottle with her right hand, tips it upside down, and begins to pour the liquor. The wine flows out of the bottle and spills on the ground. The woman holds the man's right hand, wrapped in a bandage, with her left hand.\nThe camera cuts to a scene of two people in front of a porch. The woman turns her head to the left and looks at the man. Then the man turns his head to the left.", "events": [ { "event": "At the beginning of the video, the camera is aimed at two characters. The person on the left is a woman wearing a light-colored jacket and dark underwear, while the person on the right is a man wearing a white vest with his back facing the camera. The background is a forest with sparse trees.", "visual_elements": [ { "content": "The camera is aimed at two characters.", "type": "camera", "weight": 3 }, { "content": "The character on the left is a woman wearing a light-colored jacket and dark-colored underwear.", "type": "attribute", "weight": 3 }, { "content": "The person on the right is a man wearing a gray vest, with his back facing the camera.", "type": "attribute", "weight": 3 }, { "content": "The background is a forest with sparse trees.", "type": "scene", "weight": 1 } ] }, { "event": "The woman bends down and approaches the man. Her hair is curly and she wears golden earrings on her ears. The man turns to look at the woman, with his longer hair tied in a small braid. The woman stands up straight, holding a bottle of wine in her hand. Next, the camera moves downwards to the left. The woman raises the bottle with her right hand, tips it upside down, and begins to pour the liquor. The wine flows out of the bottle and spills on the ground. 
The woman holds the man's right hand, wrapped in a bandage, with her left hand.", "visual_elements": [ { "content": "The woman bends down and approaches the man.", "type": "action", "weight": 3 }, { "content": "The woman has curly hair and wears golden earrings on her ears.", "type": "attribute", "weight": 3 }, { "content": "The man turns to look at the woman.", "type": "action", "weight": 3 }, { "content": "The man has long hair that is tied up in a small braid.", "type": "attribute", "weight": 2 }, { "content": "The woman stands up straight, holding a bottle of wine in her hand.", "type": "action", "weight": 3 }, { "content": "The camera moves downwards to the left.", "type": "camera", "weight": 3 }, { "content": "The woman raises the bottle with her right hand, tips it upside down, and begins to pour the liquor.", "type": "action", "weight": 3 }, { "content": "The wine flows out of the bottle and spills on the ground.", "type": "action", "weight": 3 }, { "content": "The woman holds the man's right hand, wrapped in a bandage, with her left hand.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to a scene of two people in front of a porch. The woman turns her head to the left and looks at the man. Then the man turns his head to the left.", "visual_elements": [ { "content": "The camera cuts to the man's face.", "type": "camera", "weight": 3 }, { "content": "The woman turns her head to the left and looks at the man.", "type": "action", "weight": 3 }, { "content": "The man turns his head to the left.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0140", "video_path": "DREAM-1K/391.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 6.25, "resolution": { "width": 1920, "height": 800 }, "caption": "At the start of the video, the camera focuses on two women sitting on outdoor chairs. The woman with black hair is wearing loose-fitting black clothing, while the woman with brown hair is dressed in a black leather jacket over a patterned top. In the background, a green wall and two red trash cans can be seen.\nNext, the camera pans upwards. The woman with black hair stands up from her chair, using her hands to push against her knees and leaning forward. At the same time, the woman with brown hair turns to her right and stands up.\nFinally, the camera pans slightly to the left. They reach out their hands to embrace each other. The woman with brown hair faces the camera, while the woman with black hair has her back to it. The woman with brown hair speaks.", "events": [ { "event": "At the start of the video, the camera focuses on two women sitting on outdoor chairs. The woman with black hair is wearing loose-fitting black clothing, while the woman with brown hair is dressed in a black leather jacket over a patterned top. In the background, a green wall and two red trash cans can be seen.", "visual_elements": [ { "content": "The camera focuses on two women.", "type": "camera", "weight": 3 }, { "content": "They sit on outdoor chairs.", "type": "scene", "weight": 3 }, { "content": "The woman with black hair is wearing loose-fitting black clothing.", "type": "attribute", "weight": 2 }, { "content": "The woman with brown hair is dressed in a black leather jacket over a patterned top.", "type": "attribute", "weight": 2 }, { "content": "In the background, a green wall and two red trash cans can be seen.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera pans upwards. 
The woman with black hair stands up from her chair, using her hands to push against her knees and leaning forward. At the same time, the woman with brown hair turns to her right and stands up.", "visual_elements": [ { "content": "The camera pans upwards.", "type": "camera", "weight": 3 }, { "content": "From her chair, the woman with black hair uses her hands to push against her knees.", "type": "action", "weight": 3 }, { "content": "The woman with black hair leans forward.", "type": "action", "weight": 3 }, { "content": "The woman with black hair stands up.", "type": "action", "weight": 3 }, { "content": "The woman with brown hair turns to her right.", "type": "action", "weight": 3 }, { "content": "The woman with brown hair stands up.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera pans slightly to the left. They reach out their hands to embrace each other. The woman with brown hair faces the camera, while the woman with black hair has her back to it. The woman with brown hair speaks.", "visual_elements": [ { "content": "The camera pans slightly to the left.", "type": "camera", "weight": 3 }, { "content": "They reach out their hands to embrace each other.", "type": "action", "weight": 3 }, { "content": "The woman with brown hair faces the camera, while the woman with black hair has her back to it.", "type": "action", "weight": 3 }, { "content": "The woman with brown hair speaks.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0141", "video_path": "DREAM-1K/392.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 4.62, "resolution": { "width": 1920, "height": 1024 }, "caption": "At the beginning of the video, the camera focuses on a man kneeling on a blanket laid out on the beach. He is wearing a striped shirt and dark shorts. The background features waves lapping against the shore, with a few people engaging in activities in the distance.\nNext, the camera pans upwards slightly. The man places his hands on the ground and stands up. After standing up, he adjusts his belt with both hands while turning around.", "events": [ { "event": "At the beginning of the video, the camera focuses on a man kneeling on a blanket laid out on the beach. He is wearing a striped shirt and dark shorts. The background features waves lapping against the shore, with a few people engaging in activities in the distance.", "visual_elements": [ { "content": "The camera focuses on a man.", "type": "camera", "weight": 3 }, { "content": "The man kneels on a blanket laid out on the beach", "type": "attribute", "weight": 3 }, { "content": "The man is wearing a striped shirt and dark shorts.", "type": "attribute", "weight": 2 }, { "content": "The background features waves lapping against the shore, with a few people engaging in activities in the distance.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera pans upwards slightly. The man places his hands on the ground and stands up. 
After standing up, he adjusts his belt with both hands while turning around.", "visual_elements": [ { "content": "The camera pans upwards slightly.", "type": "camera", "weight": 3 }, { "content": "The man places his hands on the ground.", "type": "action", "weight": 3 }, { "content": "The man stands up.", "type": "action", "weight": 3 }, { "content": "After the man stands up, he adjusts his belt with both hands while simultaneously turning around.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0142", "video_path": "DREAM-1K/393.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 9.0, "resolution": { "width": 1920, "height": 1024 }, "caption": "At the beginning of the video, the camera focuses on a city street at night, with the lights of tall buildings in the background. Under a streetlamp on the side of the street, a person is lying on the ground. Standing on one side of the person is a man, while a woman is kneeling on the other side. The man, wearing a dark coat and jeans, is bent over, searching the person lying on the ground. The woman, dressed in a light-colored coat and dark pants, has her back to the camera. In the background, a police car approaches the three people on the road. The woman leans to the right, raising her left hand.\nThe camera then cuts to a scene of the police car approaching and stopping at the roadside, with its emergency lights flashing and headlights illuminating the road ahead. The police car stops beside the three people. Next, the camera cuts to a side view of the woman and the man. The woman is looking forward with her head raised, while the man turns his head to look back.\nThe shot then cuts to a close-up of the police car. After the car comes to a stop, the door opens, and a male police officer steps out from the driver’s seat. He then closes the door with his right hand and turns to walk toward the camera. A female police officer steps out from the other side of the car.", "events": [ { "event": "At the beginning of the video, the camera focuses on a city street at night, with the lights of tall buildings in the background. Under a streetlamp on the side of the street, a person is lying on the ground. Standing on one side of the person is a man, while a woman is kneeling on the other side. The man, wearing a dark coat and jeans, is bent over, searching the person lying on the ground. The woman, dressed in a light-colored coat and dark pants, has her back to the camera. In the background, a police car approaches the three people on the road. 
The woman leans to the right, raising her left hand.", "visual_elements": [ { "content": "The camera focuses on a city street at night.", "type": "camera", "weight": 3 }, { "content": "In the background, the lights of tall buildings are visible.", "type": "scene", "weight": 1 }, { "content": "Under a streetlamp on the side of the street, a person is lying on the ground.", "type": "attribute", "weight": 3 }, { "content": "A man stands on one side of the person lying on the ground, while a woman kneels on the other side.", "type": "attribute", "weight": 3 }, { "content": "The man is wearing a dark coat and jeans.", "type": "attribute", "weight": 2 }, { "content": "The man is bent over, searching through the person lying on the ground.", "type": "action", "weight": 3 }, { "content": "The woman is wearing a light-colored coat and dark pants, with her back to the camera.", "type": "attribute", "weight": 2 }, { "content": "In the background, a police car approaches the three people on the road.", "type": "scene", "weight": 3 }, { "content": "The woman leans to the right, raising her left hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to a scene of the police car approaching and stopping at the roadside, with its emergency lights flashing and headlights illuminating the road ahead. The police car stops beside the three people. Next, the camera cuts to a side view of the woman and the man. The woman is looking forward with her head raised, while the man turns his head to look back.", "visual_elements": [ { "content": "The camera cuts to a scene of the police car approaching and then stopping at the roadside.", "type": "camera", "weight": 3 }, { "content": "The police car's emergency lights are flashing, and the headlights illuminate the road ahead.", "type": "scene", "weight": 1 }, { "content": "The police car stops beside the three people.", "type": "action", "weight": 2 }, { "content": "The camera cuts to a side view of the woman and the man.", "type": "camera", "weight": 3 }, { "content": "The woman raises her head and looks forward.", "type": "action", "weight": 3 }, { "content": "The man turns his head to look back.", "type": "action", "weight": 3 } ] }, { "event": "The shot then cuts to a close-up of the police car. After the car comes to a stop, the door opens, and a male police officer steps out from the driver’s seat. He then closes the door with his right hand and turns to walk toward the camera. 
A female police officer steps out from the other side of the car.", "visual_elements": [ { "content": "The camera cuts back to the police car.", "type": "camera", "weight": 3 }, { "content": "After the police car comes to a stop, the door opens.", "type": "action", "weight": 3 }, { "content": "A male police officer steps out from the driver's seat.", "type": "action", "weight": 3 }, { "content": "The male police officer closes the car door with his right hand.", "type": "action", "weight": 3 }, { "content": "The male police officer closes the door with his right hand and turns to walk toward the camera.", "type": "action", "weight": 3 }, { "content": "A female police officer steps out from the other side of the police car.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0143", "video_path": "DREAM-1K/394.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 5.12, "resolution": { "width": 1920, "height": 1024 }, "caption": "At the beginning of the video, the camera focuses on a man wearing a brown coat, standing by a stone railing at the riverbank. The background features the river and a bridge in the distance.\nThe camera then cuts to the man in the brown coat. The man, with both hands in his coat pockets, walks down a cobblestone street. On either side of the street are elaborately decorated lamp posts and green landscaping. Next, the man in the brown coat raises his left hand and glances at the watch on his wrist. Then, he lowers his left hand and places it back into his coat pocket. A woman in a light-colored coat enters from the right side of the frame, with her back to the camera. She walks from the right side of the frame to the left, heading in the opposite direction of the man in the brown coat. The camera slightly pans to the right.", "events": [ { "event": "At the beginning of the video, the camera focuses on a man wearing a brown coat, standing by a stone railing at the riverbank. The background features the river and a bridge in the distance.", "visual_elements": [ { "content": "The camera focuses on a man.", "type": "camera", "weight": 3 }, { "content": "The man wears a brown coat.", "type": "attribute", "weight": 2 }, { "content": "The man stands by a stone railing at the riverbank.", "type": "attribute", "weight": 3 }, { "content": "The background features the river and a bridge in the distance.", "type": "scene", "weight": 1 } ] }, { "event": "The camera then cuts to the man in the brown coat. The man, with both hands in his coat pockets, walks down a cobblestone street. On either side of the street are elaborately decorated lamp posts and green landscaping. Next, the man in the brown coat raises his left hand and glances at the watch on his wrist. Then, he lowers his left hand and places it back into his coat pocket. A woman in a light-colored coat enters from the right side of the frame, with her back to the camera. She walks from the right side of the frame to the left, heading in the opposite direction of the man in the brown coat. 
The camera slightly pans to the right.", "visual_elements": [ { "content": "The camera then cuts to the man in the brown coat.", "type": "camera", "weight": 3 }, { "content": "The man in the brown coat has his hands in his coat pockets.", "type": "attribute", "weight": 3 }, { "content": "The man in the brown coat is walking down a cobblestone street.", "type": "action", "weight": 3 }, { "content": "On either side of the street are elaborately decorated lamp posts and green landscaping.", "type": "scene", "weight": 1 }, { "content": "The man in the brown coat raises his left hand.", "type": "action", "weight": 3 }, { "content": "The man in the brown coat glances down at the watch on his wrist.", "type": "action", "weight": 3 }, { "content": "The man in the brown coat raises his right hand.", "type": "action", "weight": 3 }, { "content": "The man in the brown coat then places his left hand back into his coat pocket.", "type": "action", "weight": 3 }, { "content": "A woman in a light-colored coat enters the frame from the right side, with her back to the camera.", "type": "attribute", "weight": 2 }, { "content": "The woman walks from the right side of the frame to the left side.", "type": "action", "weight": 2 }, { "content": "The woman walks in the opposite direction of the man in the brown coat.", "type": "action", "weight": 2 }, { "content": "The camera slightly pans to the right.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0144", "video_path": "DREAM-1K/396.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 5.88, "resolution": { "width": 1920, "height": 944 }, "caption": "The video begins with four young men standing on a patch of green grass. The man on the left is wearing a black jacket and blue jeans and holding a basketball in his right hand. Opposite the man with the basketball stands a man with blond hair. He is wearing a black long-sleeved shirt and blue jeans. Next to them are two men. One man is wearing a green coat and gray sweatpants, while the other is wearing a blue long-sleeved shirt and blue jeans and carrying a backpack.\nThe man in black jacket throws the basketball at the man in the black long sleeves, who catches the ball. Then, the man in black long sleeves throws the basketball with both hands to the man in black jacket. The man in black jacket catches the ball with both hands and spread them out. At the same time, the man in black long sleeves waves his hands and smiles and talks to the man in black jacket. The man in black jacket then passes the basketball from his right hand to his left hand, and then his right hand points to the right, glancing to the right and watching the man in black long sleeves speak. Finally, the man in black jacket passes the basketball from his left hand to his right. The man with the backpack and the man in green jacket are talking happily next to them. A red brick building can be seen in the background with several trees in front of it. In the distance, several people are walking on the grass.\nThe camera wobbles and a woman in white enters from the left side of the frame. The woman in white is walking backwards and talking to the woman in red who is opposite her. The woman in red also walks into the frame from the left.", "events": [ { "event": "The video begins with four young men standing on a patch of green grass. The man on the left is wearing a black jacket and blue jeans and holding a basketball in his right hand. 
Opposite the man with the basketball stands a man with blond hair. He is wearing a black long-sleeved shirt and blue jeans. Next to them are two men. One man is wearing a green coat and gray sweatpants, while the other is wearing a blue long-sleeved shirt and blue jeans and carrying a backpack.", "visual_elements": [ { "content": "Four young men stand on a patch of green grass.", "type": "scene", "weight": 3 }, { "content": "The man on the left is wearing a black jacket and blue jeans and holding a basketball in his right hand.", "type": "attribute", "weight": 3 }, { "content": "Opposite the man with the basketball stands a man with blond hair.", "type": "attribute", "weight": 2 }, { "content": "The blond man is wearing a black long-sleeved shirt and blue jeans.", "type": "attribute", "weight": 2 }, { "content": "Next to them are two men.", "type": "attribute", "weight": 3 }, { "content": "One man is wearing a green coat and gray sweatpants.", "type": "attribute", "weight": 2 }, { "content": "The other man is wearing a blue long-sleeved shirt and blue jeans and carrying a backpack.", "type": "attribute", "weight": 2 } ] }, { "event": "The man in black jacket throws the basketball at the man in the black long sleeves, who catches the ball. Then, the man in black long sleeves throws the basketball with both hands to the man in black jacket. The man in black jacket catches the ball with both hands and spread them out. At the same time, the man in black long sleeves waves his hands and smiles and talks to the man in black jacket. The man in black jacket then passes the basketball from his right hand to his left hand, and then his right hand points to the right, glancing to the right and watching the man in black long sleeves speak. Finally, the man in black jacket passes the basketball from his left hand to his right. The man with the backpack and the man in green jacket are talking happily next to them. A red brick building can be seen in the background with several trees in front of it. 
In the distance, several people are walking on the grass.", "visual_elements": [ { "content": "The man in black jacket throws the basketball at the man in black long sleeves.", "type": "action", "weight": 3 }, { "content": "The man in black long sleeves catches the ball.", "type": "action", "weight": 3 }, { "content": "The man in black long sleeves throws the basketball with both hands to the man in black jacket.", "type": "action", "weight": 3 }, { "content": "The man in black jacket catches the ball with both hands.", "type": "action", "weight": 3 }, { "content": "The man in black jacket spreads his hands out.", "type": "action", "weight": 3 }, { "content": "The man in black long sleeves waves his hands.", "type": "action", "weight": 2 }, { "content": "The man in black long sleeves smiles and talks to the man in black jacket.", "type": "action", "weight": 3 }, { "content": "The man in black jacket then passes the basketball from his right hand to his left hand.", "type": "action", "weight": 2 }, { "content": "The man in black jacket points to the right with his right hand, glancing to the right.", "type": "action", "weight": 3 }, { "content": "The man in black jacket watches the man in black long sleeves speak.", "type": "action", "weight": 3 }, { "content": "The man in black jacket passes the basketball from his left hand to his right.", "type": "action", "weight": 2 }, { "content": "The man with the backpack and the man in green jacket are talking happily next to them.", "type": "action", "weight": 1 }, { "content": "A red brick building can be seen in the background with several trees in front of it.", "type": "scene", "weight": 1 }, { "content": "Several people are walking on the grass.", "type": "scene", "weight": 1 } ] }, { "event": "The camera wobbles and a woman in white enters from the left side of the frame. The woman in white is walking backwards and talking to the woman in red who is opposite her. The woman in red also walks into the frame from the left.", "visual_elements": [ { "content": "The camera wobbles.", "type": "camera", "weight": 3 }, { "content": "A woman in white enters from the left side of the frame.", "type": "action", "weight": 2 }, { "content": "The woman in white is walking backwards.", "type": "action", "weight": 3 }, { "content": "The woman in white is talking to the woman in red.", "type": "action", "weight": 2 }, { "content": "The woman in red also walks into the frame from the left.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 26 }, { "index": "TUNA_0145", "video_path": "DREAM-1K/397.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 5.75, "resolution": { "width": 1920, "height": 944 }, "caption": "The video begins with the camera focusing on a middle-aged man, who is lying asleep in bed. The man is covered with a checked comforter and propped on his side on a pillow. His hand gently grasps the edge of the pillow. The background is a darkened room.\nNext, the camera cuts to a residential area outside. On the right side of the street are fine houses, surrounded by green trees and lawns, and the sun shines brightly. The camera pans up to the left and a boy on an orange bike rides forward. The boy is wearing a red jacket, blue jeans and a gray helmet.\nThe camera pans following the boy's cycling path as he moves forward. 
He rides forward at a relatively fast speed, with his red jacket fluttering in the wind.", "events": [ { "event": "The video begins with the camera focusing on a middle-aged man, who is lying asleep in bed. The man is covered with a checked comforter and propped on his side on a pillow. His hand gently grasps the edge of the pillow. The background is a darkened room.", "visual_elements": [ { "content": "The camera focuses on a middle-aged man.", "type": "camera", "weight": 3 }, { "content": "The man is lying asleep in bed.", "type": "attribute", "weight": 3 }, { "content": "The man is covered with a checked comforter and propped on his side on a pillow.", "type": "attribute", "weight": 2 }, { "content": "The man's hand gently grasps the edge of the pillow.", "type": "attribute", "weight": 2 }, { "content": "The background is a darkened room.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera cuts to a residential area outside. On the right side of the street are fine houses, surrounded by green trees and lawns, and the sun shines brightly. The camera pans up to the left and a boy on an orange bike rides forward. The boy is wearing a red jacket, blue jeans and a gray helmet.", "visual_elements": [ { "content": "The camera cuts to a residential area outside.", "type": "camera", "weight": 3 }, { "content": "On the right side of the street are fine houses, surrounded by green trees and lawns, and the sun shines brightly.", "type": "scene", "weight": 1 }, { "content": "The camera pans up to the left.", "type": "camera", "weight": 3 }, { "content": "A boy on an orange bike rides forward.", "type": "action", "weight": 3 }, { "content": "The boy is wearing a red jacket, blue jeans and a gray helmet.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans following the boy's cycling path as he moves forward. He rides forward at a relatively fast speed, with his red jacket fluttering in the wind.", "visual_elements": [ { "content": "The camera pans following the boy's cycling path as he moves forward.", "type": "camera", "weight": 3 }, { "content": "The boy rides forward at a relatively fast speed, with his red jacket fluttering in the wind.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0146", "video_path": "DREAM-1K/398.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 5.38, "resolution": { "width": 1920, "height": 800 }, "caption": "At the beginning of the video, the camera focuses on a spacious garage, with light shining in from the windows at the back, illuminating the entire space. There are two characters in the garage, a man and a woman, facing away from the camera. The man wears a light blue short-sleeved shirt and beige trousers, while the woman wears a striped shirt and dark jeans, with her hair tied up in a ponytail.\nThe camera pans slightly to the right. They stand in front of a car with their arms linked. The man turns his head to look at the woman. Subsequently, he lets go of the woman's arm, bends down, and lifts the car cover off the vehicle. As the car cover is uncovered, a white new car appears in the frame. The woman's gaze is fixed on the car. After fully removing the car cover and throwing it aside, the man turns his head to look at the woman.", "events": [ { "event": "At the beginning of the video, the camera focuses on a spacious garage, with light shining in from the windows at the back, illuminating the entire space. 
There are two characters in the garage, a man and a woman, facing away from the camera. The man wears a light blue short-sleeved shirt and beige trousers, while the woman wears a striped shirt and dark jeans, with her hair tied up in a ponytail.", "visual_elements": [ { "content": "The camera focuses on a spacious garage.", "type": "camera", "weight": 3 }, { "content": "Light shines in from the windows at the back of the garage, illuminating the entire space.", "type": "scene", "weight": 1 }, { "content": "There are two characters in the garage, a man and a woman, facing away from the camera.", "type": "attribute", "weight": 3 }, { "content": "The man wears a light blue short-sleeved shirt and beige trousers.", "type": "attribute", "weight": 2 }, { "content": "The woman wears a striped shirt and dark jeans, with her hair tied up in a ponytail.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans slightly to the right. They stand in front of a car with their arms linked. The man turns his head to look at the woman. Subsequently, he lets go of the woman's arm, bends down, and lifts the car cover off the vehicle. As the car cover is uncovered, a white new car appears in the frame. The woman's gaze is fixed on the car. After fully removing the car cover and throwing it aside, the man turns his head to look at the woman.", "visual_elements": [ { "content": "The camera pans slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The two stand in front of a car with their arms linked.", "type": "attribute", "weight": 3 }, { "content": "The man turns his head to look at the woman.", "type": "action", "weight": 2 }, { "content": "The man lets go of the woman's arm", "type": "action", "weight": 3 }, { "content": "The man bends down, and lifts the car cover off the vehicle.", "type": "action", "weight": 3 }, { "content": "As the car cover is uncovered, a white new car appears in the frame.", "type": "attribute", "weight": 2 }, { "content": "The woman's gaze is fixed on the car.", "type": "action", "weight": 2 }, { "content": "The man throws the car cover aside after fully removing it.", "type": "action", "weight": 2 }, { "content": "The man turns his head to look at the woman.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0147", "video_path": "DREAM-1K/399.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 2.5, "resolution": { "width": 1840, "height": 1024 }, "caption": "At the beginning of the video, the camera focuses on a swimming pool. A woman dressed in a white dress with the number \"8\" printed in red on the back stands by the poolside. She has brown hair that cascades over her shoulders. She is facing a man dressed in dark clothing. Suddenly, the man loses his balance, leaning backwards with his arms flailing in an attempt to regain stability. The woman holds a red hat in her hand. The man continues to fall backwards and eventually plunges into the pool, causing a large splash of water.\nThe camera cuts to another angle, revealing more of the party scene. The woman in the white dress looks down at the man who has fallen into the water. In the background, there are numerous pumpkin lanterns and decorative lights. Many people in the crowd are dressed in various costumes. The splash of water draws the attention of those around.\nThe camera cuts again, this time focusing on another couple by the poolside. 
The man wears a red and white striped long-sleeved shirt and a red hat, while the woman wears tight-fitting black pants and a white top, with cat ears as a head accessory. The kiss between the two is interrupted,and both turn their heads to look at the pool.", "events": [ { "event": "At the beginning of the video, the camera focuses on a swimming pool. A woman dressed in a white dress with the number \"8\" printed in red on the back stands by the poolside. She has brown hair that cascades over her shoulders. She is facing a man dressed in dark clothing. Suddenly, the man loses his balance, leaning backwards with his arms flailing in an attempt to regain stability. The woman holds a red hat in her hand. The man continues to fall backwards and eventually plunges into the pool, causing a large splash of water.", "visual_elements": [ { "content": "The camera focuses on a swimming pool.", "type": "camera", "weight": 3 }, { "content": "There stands a woman by the poolside, dressed in a white dress with the number \"8\" printed in red on the back.", "type": "attribute", "weight": 2 }, { "content": "She has brown hair that cascades over her shoulders.", "type": "attribute", "weight": 2 }, { "content": "She is facing a man dressed in dark clothing.", "type": "attribute", "weight": 3 }, { "content": "The man loses his balance.", "type": "action", "weight": 3 }, { "content": "The man leans backwards with his arms flailing in an attempt to regain stability.", "type": "action", "weight": 3 }, { "content": "The woman holds a red hat in her hand.", "type": "attribute", "weight": 1 }, { "content": "The man continues to fall backwards and eventually plunges into the pool, causing a large splash of water.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to another angle, revealing more of the party scene. The woman in the white dress looks down at the man who has fallen into the water. In the background, there are numerous pumpkin lanterns and decorative lights. Many people in the crowd are dressed in various costumes. The splash of water draws the attention of those around.", "visual_elements": [ { "content": "The camera cuts to another angle, revealing more of the party scene.", "type": "camera", "weight": 3 }, { "content": "The woman in the white dress looks down at the man who has fallen into the water.", "type": "action", "weight": 2 }, { "content": "In the background, there are numerous pumpkin lanterns and decorative lights.", "type": "scene", "weight": 1 }, { "content": "Many people in the crowd are dressed in various costumes.", "type": "scene", "weight": 1 }, { "content": "The splash of water draws the attention of those around.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts again, this time focusing on another couple by the poolside. The man wears a red and white striped long-sleeved shirt and a red hat, while the woman wears tight-fitting black pants and a white top, with cat ears as a head accessory. 
The kiss between the two is interrupted,and both turn their heads to look at the pool.", "visual_elements": [ { "content": "The camera cuts again, this time focusing on another couple by the poolside.", "type": "camera", "weight": 3 }, { "content": "The man wears a red and white striped long-sleeved shirt and a red hat.", "type": "attribute", "weight": 2 }, { "content": "The woman wears tight-fitting black pants and a white top, with cat ears as a head accessory.", "type": "attribute", "weight": 2 }, { "content": "The kiss between the two is interrupted.", "type": "action", "weight": 3 }, { "content": "The two turn their heads to look at the pool.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0148", "video_path": "DREAM-1K/400.mp4", "video_source": "DREAM-1K", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 4.25, "resolution": { "width": 1920, "height": 1024 }, "caption": "At the beginning of the video, the camera focuses on a man dressed in a brown coat and a white sweater. He wears a checkered hat, a dark scarf around his neck, and a pair of leopard-print glasses. The background is a building, with a red sunshade next to it. The man looks down at the camera. He raises his left hand to take off his glasses from his face. Then the camera pans slightly upwards and to the right. The man looks up towards the left front. His lips move as if he's speaking.", "events": [ { "event": "At the beginning of the video, the camera focuses on a man dressed in a brown coat and a white sweater. He wears a checkered hat, a dark scarf around his neck, and a pair of leopard-print glasses. The background is a building, with a red sunshade next to it. The man looks down at the camera. He raises his left hand to take off his glasses from his face. Then the camera pans slightly upwards and to the right. The man looks up towards the left front. His lips move as if he's speaking.", "visual_elements": [ { "content": "The camera focuses on a man.", "type": "camera", "weight": 3 }, { "content": "The man is dressed in a brown coat and a white sweater. He wears a checkered hat, a dark scarf around his neck, and a pair of leopard-print glasses.", "type": "attribute", "weight": 3 }, { "content": "The background is a building, with a red sunshade next to it.", "type": "scene", "weight": 2 }, { "content": "The man looks down at the camera.", "type": "attribute", "weight": 2 }, { "content": "The man raises his left hand to take off his glasses from his face.", "type": "action", "weight": 3 }, { "content": "The camera pans slightly upwards and to the right.", "type": "camera", "weight": 3 }, { "content": "The man looks up towards the left front.", "type": "action", "weight": 3 }, { "content": "The man's lips move.", "type": "action", "weight": 1 } ] } ], "n_events": 1, "n_elements": 8 }, { "index": "TUNA_0149", "video_path": "VELOCITI/v_--ifbq2xY6I_seg_150_160.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on a shirtless man, filming from his left rear. The man faces away from the camera, looking towards the right front. The man turns his head to his left while speaking, then spins around to face the camera.\nThe camera cuts to a woman wearing a light blue shirt. She has light-colored hair and pushes a cart filled with some cleaning supplies. 
She speaks towards the front and pushes the cart back and forth.\nThe camera cuts back to the man, who is bending down and looking up.\nThe camera cuts back to the woman, who reaches into the cart with her right hand and picks up a spray bottle.\nThe camera cuts to the man. To his right in the scene, across from him, a hand holds a spray bottle and sprays it towards the man's face. The man closes his eyes. The hand holding the spray bottle exits from the right side of the scene. The man steps back, covering his face with his right hand.\nNext, the camera cuts to the woman. She looks angry and exits from the left side of the scene.\nFinally, the camera cuts back to the man, who is bowing his head and wiping his face with his right hand. The elevator door behind him slowly opens. The man turns his head to his right. Inside the elevator, there are many people, including a woman in a red top who looks surprised at what's ahead. The man spins around, then bends down and starts running quickly to the right side of the scene. The woman in the red top looks down, then covers her mouth with her right hand. A man in a suit stands to the left of the woman in the red top, watching what's happening.", "events": [ { "event": "The camera focuses on a shirtless man, filming from his left rear. The man faces away from the camera, looking towards the right front. The man turns his head to his left while speaking, then spins around to face the camera.", "visual_elements": [ { "content": "The camera focuses on a shirtless man, filming from his left rear.", "type": "camera", "weight": 3 }, { "content": "The man faces away from the camera, looking towards the right front.", "type": "attribute", "weight": 3 }, { "content": "The man turns his head to his left while speaking.", "type": "action", "weight": 3 }, { "content": "The man turns around to face the camera.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a woman wearing a light blue shirt. She has light-colored hair and pushes a cart filled with some cleaning supplies. She speaks towards the front and pushes the cart back and forth.", "visual_elements": [ { "content": "The camera cuts to a woman wearing a light blue shirt.", "type": "camera", "weight": 3 }, { "content": "The woman has light-colored hair.", "type": "attribute", "weight": 3 }, { "content": "The woman pushes a cart filled with some cleaning supplies.", "type": "attribute", "weight": 1 }, { "content": "The woman speaks towards the front and pushes the cart back and forth.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the man, who is bending down and looking up.", "visual_elements": [ { "content": "The camera cuts back to the man.", "type": "camera", "weight": 3 }, { "content": "The man is bending down and looking up.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts back to the woman, who reaches into the cart with her right hand and picks up a spray bottle.", "visual_elements": [ { "content": "The camera cuts back to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman reaches into the cart with her right hand and picks up a spray bottle.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the man. To his right in the scene, across from him, a hand holds a spray bottle and sprays it towards the man's face. The man closes his eyes. The hand holding the spray bottle exits from the right side of the scene. 
The man steps back, covering his face with his right hand.", "visual_elements": [ { "content": "The camera cuts to the man.", "type": "camera", "weight": 3 }, { "content": "Across from the man, a hand holds a spray bottle and sprays it towards the man's face.", "type": "action", "weight": 3 }, { "content": "The man closes his eyes.", "type": "action", "weight": 3 }, { "content": "The hand holding the spray bottle exits from the right side of the scene.", "type": "action", "weight": 2 }, { "content": "The man steps back, covering his face with his right hand.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to the woman. She looks angry and exits from the left side of the scene.", "visual_elements": [ { "content": "The camera cuts to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman looks angry.", "type": "attribute", "weight": 3 }, { "content": "The woman exits from the left side of the scene.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts back to the man, who is bowing his head and wiping his face with his right hand. The elevator door behind him slowly opens. The man turns his head to his right. Inside the elevator, there are many people, including a woman in a red top who looks surprised at what's ahead. The man spins around, then bends down and starts running quickly to the right side of the scene. The woman in the red top looks down, then covers her mouth with her right hand. A man in a suit stands to the left of the woman in the red top, watching what's happening.", "visual_elements": [ { "content": "The camera finally cuts back to the man.", "type": "camera", "weight": 3 }, { "content": "The man is bowing his head and wiping his face with his right hand.", "type": "action", "weight": 3 }, { "content": "The elevator door behind him slowly opens.", "type": "action", "weight": 3 }, { "content": "The man turns his head to his right.", "type": "action", "weight": 3 }, { "content": "There are many people standing in the elevator.", "type": "attribute", "weight": 2 }, { "content": "Among them, a woman in a red top looks surprised at what's ahead.", "type": "attribute", "weight": 2 }, { "content": "The man turns around, then bends down and starts running quickly to the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The woman in the red top looks down, then covers her mouth with her right hand.", "type": "action", "weight": 2 }, { "content": "A man in a suit stands to the left of the woman in the red top, watching what's happening.", "type": "attribute", "weight": 2 } ] } ], "n_events": 7, "n_elements": 29 }, { "index": "TUNA_0150", "video_path": "VELOCITI/v_-5twCD8tAMc_seg_60_70.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera remains stationary. The scene shows an expansive meadow under a clear sky with large patches of clouds. In the forefront of the scene, a man wearing a military hat rides a white horse. A long rope connects the white horse to the wrists of a man dressed in white clothing and a white hat, as they move together towards the left of the scene. Subsequently, the scene shows another military hat-wearing man riding a brown horse, pulling a man wearing a straw hat and a red coat with a rope. 
As the man in the red coat moves to the center of the scene, he suddenly falls to the ground.\nThe camera pans down, and he then rolls onto his right side.\nThe camera pans up to the left. The man in the red coat, with his back to the camera, stands up and quickly bends forward to his left, moving rapidly before lying prone on the ground with his upper torso facing forward.\nThe camera cuts to a scene showing an officer riding a white horse, pulling a man wearing a red sleeveless coat forward as he runs. The man is pulled down to the ground in a crouched position and then dragged on his back towards the left of the scene, eventually disappearing from view.\nFinally, the camera pans down and then to the left. Immediately after, a man wearing a black sleeveless coat is similarly pulled to the ground and rolls over.", "events": [ { "event": "At the beginning of the video, the camera remains stationary. The scene shows an expansive meadow under a clear sky with large patches of clouds. In the forefront of the scene, a man wearing a military hat rides a white horse. A long rope connects the white horse to the wrists of a man dressed in white clothing and a white hat, as they move together towards the left of the scene. Subsequently, the scene shows another military hat-wearing man riding a brown horse, pulling a man wearing a straw hat and a red coat with a rope. As the man in the red coat moves to the center of the scene, he suddenly falls to the ground.", "visual_elements": [ { "content": "The scene features an expansive grassland with a clear sky and large patches of clouds.", "type": "scene", "weight": 2 }, { "content": "A man wearing a military hat rides a white horse.", "type": "attribute", "weight": 2 }, { "content": "A long rope connects the white horse to the wrists of a man dressed in white clothing and wearing a white hat.", "type": "attribute", "weight": 3 }, { "content": "The man wearing a military hat, the white horse, and the man wearing a white hat move together towards the left of the scene.", "type": "action", "weight": 3 }, { "content": "The scene shows a military hat-wearing man riding a brown horse.", "type": "camera", "weight": 3 }, { "content": "The man wearing a military hat pulls a man wearing a straw hat and a red coat with a rope.", "type": "attribute", "weight": 2 }, { "content": "When the man in the red coat moves to the center of the scene, he suddenly falls to the ground.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans down, and he then rolls onto his right side.", "visual_elements": [ { "content": "The camera pans down.", "type": "camera", "weight": 3 }, { "content": "The man in the red coat rolls onto his right side.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans up to the left. The man in the red coat, with his back to the camera, stands up and quickly bends forward to his left, moving rapidly before lying prone on the ground with his upper torso facing forward.", "visual_elements": [ { "content": "The camera pans up and to the left, with the man in the red coat facing away from the camera.", "type": "attribute", "weight": 3 }, { "content": "The man in the red coat stands up and quickly bends forward to his left.", "type": "action", "weight": 3 }, { "content": "The man in the red coat lies prone on the ground with his upper torso facing forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a scene showing an officer riding a white horse, pulling a man wearing a red sleeveless coat forward as he runs. 
The man is pulled down to the ground in a crouched position and then dragged on his back towards the left of the scene, eventually disappearing from view.", "visual_elements": [ { "content": "The camera cuts to a scene showing an officer riding a white horse, pulling a man wearing a red sleeveless coat forward as he runs.", "type": "camera", "weight": 3 }, { "content": "The man in the red sleeveless coat is pulled down to the ground face-first.", "type": "action", "weight": 3 }, { "content": "The man in the red sleeveless coat is dragged on his back towards the left of the scene and eventually disappears from view.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera pans down and then to the left. Immediately after, a man wearing a black sleeveless coat is similarly pulled to the ground and rolls over.", "visual_elements": [ { "content": "The camera finally pans down and then to the left.", "type": "camera", "weight": 3 }, { "content": "A man wearing a black sleeveless coat is similarly pulled to the ground and rolls over.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 17 }, { "index": "TUNA_0151", "video_path": "VELOCITI/v_-CzO7z1dZ1A_seg_105_115.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on the interior of a room where a desk lamp is lit, and several posters are stuck on the walls. Inside the room, there is a person wearing a dark checkered shirt and a white cloth hood. The person in the hood holds a slingshot in their right hand and slowly straightens up. There is a hole in the white cloth hood, and the person drops the slingshot, then walks forward. Behind the person's right side, there is a fallen chair.\nThe camera cuts to a scene. The lighting is dim. A woman with long blonde hair is crouching beside a cabinet, facing away from the camera. The camera pans up, and the woman pulls something backwards with her left hand. Then, the camera pans to the right. The woman turns to the right of the scene while raising a red saw upwards.\nThe camera cuts to a scene to the right of the woman and pans to the right. The woman walks forward with the saw in both hands. Ahead of her is the person in the white cloth hood. This person leans backwards, with their right hand on their chest, retreating. The camera pans down and to the right. Sparks fly from the saw, cutting into the right arm of the person in the white hood. The person trips over a small wooden table behind them and falls face-down to the ground.\nFinally, the camera cuts back to the woman, who looks down to her left.", "events": [ { "event": "The camera focuses on the interior of a room where a desk lamp is lit, and several posters are stuck on the walls. Inside the room, there is a person wearing a dark checkered shirt and a white cloth hood. The person in the hood holds a slingshot in their right hand and slowly straightens up. There is a hole in the white cloth hood, and the person drops the slingshot, then walks forward. 
Behind the person's right side, there is a fallen chair.", "visual_elements": [ { "content": "The scene focuses on the interior of a room.", "type": "camera", "weight": 3 }, { "content": "A desk lamp is lit inside the room.", "type": "scene", "weight": 2 }, { "content": "Several posters are stuck on the walls of the room.", "type": "scene", "weight": 2 }, { "content": "There is a person in the room wearing a dark checkered shirt and a white cloth hood.", "type": "attribute", "weight": 3 }, { "content": "The person in the white cloth hood holds a slingshot in their right hand and slowly straightens up.", "type": "action", "weight": 3 }, { "content": "There is a hole in the white cloth hood.", "type": "attribute", "weight": 2 }, { "content": "The person in the white cloth hood drops the slingshot from their hand and walks forward.", "type": "action", "weight": 3 }, { "content": "Behind the right side of the person in the white cloth hood, there is a fallen chair.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a scene. The lighting is dim. A woman with long blonde hair is crouching beside a cabinet, facing away from the camera. The camera pans up, and the woman pulls something backwards with her left hand. Then, the camera pans to the right. The woman turns to the right of the scene while raising a red saw upwards.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The lighting is dim.", "type": "scene", "weight": 2 }, { "content": "A woman with long blonde hair is crouching beside a cabinet, facing away from the camera.", "type": "attribute", "weight": 3 }, { "content": "The camera pans up.", "type": "camera", "weight": 3 }, { "content": "The woman pulls something backwards with her left hand.", "type": "action", "weight": 3 }, { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "The woman turns to the right of the scene while raising a red saw upwards.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a scene to the right of the woman and pans to the right. The woman walks forward with the saw in both hands. Ahead of her is the person in the white cloth hood. This person leans backwards, with their right hand on their chest, retreating. The camera pans down and to the right. Sparks fly from the saw, cutting into the right arm of the person in the white hood. 
The person trips over a small wooden table behind them and falls face-down to the ground.", "visual_elements": [ { "content": "The camera cuts to the right of the woman and pans to the right.", "type": "camera", "weight": 3 }, { "content": "The woman walks forward with the saw in both hands.", "type": "action", "weight": 3 }, { "content": "Ahead of the woman is the person in the white cloth hood.", "type": "attribute", "weight": 3 }, { "content": "The person in the white cloth hood leans backwards with their right hand on their chest.", "type": "attribute", "weight": 3 }, { "content": "The person in the white cloth hood retreats.", "type": "action", "weight": 3 }, { "content": "The camera pans down.", "type": "camera", "weight": 3 }, { "content": "Sparks fly from the saw, cutting into the right arm of the person in the white hood.", "type": "action", "weight": 3 }, { "content": "The person in the white cloth hood trips over a small wooden table behind them and falls face-down to the ground.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts back to the woman, who looks down to her left.", "visual_elements": [ { "content": "The camera finally cuts to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman looks down to her left.", "type": "attribute", "weight": 3 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0152", "video_path": "VELOCITI/v_-sJezi3j7O8_seg_90_100.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an indoor scene. A young man wearing a black vest and dark pants lies on the ground, with strong arms. To the right of the vest-wearing man, someone in khaki pants is pulling his right hand. In front of and to the right of the vest-wearing man, a hand holds a knife.\nThen, the camera cuts. A man in a brown jacket throws a frying pan forward with both hands. The camera cuts to a bald man in a gray hoodie, who uses his arm to block the flying frying pan. The frying pan moves towards the bald man's left rear. In the background, a fat man in a gray sleeveless jacket dodges the frying pan.\nThe camera cuts again to show the brown jacket man spraying a can towards the bald man's face, who shields his eyes with his right arm.\nNext, the camera cuts to underneath a table. The man in the black vest takes out a gun from under the table.\nThe camera cuts again to the brown jacket man, who stands near the door, turns to his left, and runs towards the door. In the background, a long-haired woman in a gray T-shirt sits and looks at her hands.\nThe camera cuts to a room decorated with a Christmas tree and then pans left. The brown jacket man pushes down a mesh door and runs towards the left of the scene, waving his arms. As he runs past the Christmas tree, he glances back.\nNext, the camera cuts to a man in a black coat holding a knife in his right hand, standing near the door. He turns his head to his right to shout, then turns to his left and backs away towards the door. In the background, a terrace, a chair, and a lamp are visible.\nThe camera cuts and finally pans left. The brown jacket man runs through a room with white square tiles and then runs forward with his back to the camera.", "events": [ { "event": "At the beginning of the video, the camera focuses on an indoor scene. 
A young man wearing a black vest and dark pants lies on the ground, with strong arms. To the right of the vest-wearing man, someone in khaki pants is pulling his right hand. In front of and to the right of the vest-wearing man, a hand holds a knife.", "visual_elements": [ { "content": "The camera focuses on an indoor scene.", "type": "camera", "weight": 3 }, { "content": "A young man wearing a black vest and dark pants lies on the ground.", "type": "attribute", "weight": 3 }, { "content": "A person in khaki pants is pulling the vest-wearing man's right hand.", "type": "attribute", "weight": 2 }, { "content": "In front of and to the right of the vest-wearing man, a hand holds a knife.", "type": "attribute", "weight": 3 } ] }, { "event": "Then, the camera cuts. A man in a brown jacket throws a frying pan forward with both hands. The camera cuts to a bald man in a gray hoodie, who uses his arm to block the flying frying pan. The frying pan moves towards the bald man's left rear. In the background, a fat man in a gray sleeveless jacket dodges the frying pan.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A man in a brown jacket throws a frying pan forward with both hands.", "type": "action", "weight": 3 }, { "content": "The camera cuts to a bald man wearing a gray hoodie and sleeveless jacket.", "type": "camera", "weight": 3 }, { "content": "The bald man uses his arm to block the flying frying pan.", "type": "action", "weight": 3 }, { "content": "The frying pan moves towards the bald man's left rear.", "type": "scene", "weight": 2 }, { "content": "In the background, a fat man in a gray sleeveless jacket dodges the frying pan.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts again to show the brown jacket man spraying a can towards the bald man's face, who shields his eyes with his right arm.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "A curly-haired man sprays a can towards the bald man's face.", "type": "action", "weight": 3 }, { "content": "The bald man shields his eyes with his right arm.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to underneath a table. The man in the black vest takes out a gun from under the table.", "visual_elements": [ { "content": "The camera cuts to underneath a table.", "type": "camera", "weight": 3 }, { "content": "The man in the black vest takes out a gun from under the table.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again to the brown jacket man, who stands near the door, turns to his left, and runs towards the door. In the background, a long-haired woman in a gray T-shirt sits and looks at her hands.", "visual_elements": [ { "content": "The camera cuts back to the brown jacket man, showing him near the door.", "type": "camera", "weight": 3 }, { "content": "The brown jacket man turns to his left and runs towards the door.", "type": "action", "weight": 3 }, { "content": "In the background, a long-haired woman in a gray T-shirt sits and looks at her hands.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a room decorated with a Christmas tree and then pans left. The brown jacket man pushes down a mesh door and runs towards the left of the scene, waving his arms. 
As he runs past the Christmas tree, he glances back.", "visual_elements": [ { "content": "The camera cuts to a room decorated with a Christmas tree and then pans left.", "type": "camera", "weight": 3 }, { "content": "The brown jacket man pushes down a mesh door and runs towards the left of the scene, waving his arms.", "type": "action", "weight": 3 }, { "content": "The brown jacket man glances back as he runs past the Christmas tree.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera cuts to a man in a black coat holding a knife in his right hand, standing near the door. He turns his head to his right to shout, then turns to his left and backs away towards the door. In the background, a terrace, a chair, and a lamp are visible.", "visual_elements": [ { "content": "The camera cuts to a man in a black coat holding a knife in his right hand, standing near a door.", "type": "camera", "weight": 3 }, { "content": "The knife-holding man turns his head to his right to shout, then turns to his left and backs away towards the door.", "type": "action", "weight": 3 }, { "content": "In the background, a terrace, a chair, and a lamp are visible.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts and finally pans left. The brown jacket man runs through a room with white square tiles and then runs forward with his back to the camera.", "visual_elements": [ { "content": "The camera cuts again, and finally pans left.", "type": "camera", "weight": 3 }, { "content": "The brown jacket man runs through a room with white square tiles and then runs forward with his back to the camera.", "type": "action", "weight": 3 } ] } ], "n_events": 8, "n_elements": 26 }, { "index": "TUNA_0153", "video_path": "VELOCITI/v_-yzEjTjo2IA_seg_45_55.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the scene shows a dimly lit corridor. The corridor is narrow, with burning flames on both sides. On the left side of the corridor, there is a woman bending over. In front of her is another woman holding a silver long sword in her right hand.\nThe camera cuts. A woman with long blonde hair grabs the railing with both hands, looking ahead.\nThen, the camera cuts to a closer perspective of the two women. The woman in front of the camera, with black long hair and decorations on her head, faces away from the camera. In front of her is a woman with short black hair. The short-haired woman swings her knife to the left of the scene, aiming at the long-haired woman.\nThe camera cuts to behind the short-haired woman. The short-haired woman faces away from the camera, wearing a black tight-fitting outfit. The long-haired woman faces the camera, dressed provocatively with metal decorations on her face. She raises her sword to block the short-haired woman's attack.\nThe camera cuts to a scene where the long-haired woman is on the right side of the scene.\nThe camera cuts to a scene where the short-haired woman is on the right side of the scene, and the long-haired woman swings her sword at the short-haired woman's abdomen.\nThe camera cuts to a front view of the long-haired woman. The short-haired woman ducks low, dodging the long-haired woman's sword swing.\nThe camera cuts to the back of the long-haired woman, then to the back of the short-haired woman. 
Both women wield their knives in a stalemate, with the blades crossing in an \"X\" shape.\nThe camera cuts to a close-up of the short-haired woman's face, then to the back of the long-haired woman, and then a close-up of the short-haired woman again. The short-haired woman swings her knife, injuring the long-haired woman.\nThe camera cuts to a close-up of the short-haired woman's face. The short-haired woman elbows the long-haired woman's face with her right arm.\nThe camera cuts to the back of the short-haired woman. The long-haired woman, after being elbowed, falls backward to the right of the scene.\nThe camera cuts to a front view of the short-haired woman, panting.\nThen, the camera cuts to a front view of the long-haired woman, facing right towards the camera, panting.\nThe camera cuts to a front view of the short-haired woman, then pulls back to show her from a distance. The short-haired woman swings her knife to initiate an attack.\nThe camera cuts to the back of the long-haired woman. The long-haired woman jumps upwards.\nThe camera cuts to a front view of the short-haired woman. The short-haired woman swings her knife at the long-haired woman with both hands.\nThe camera cuts to the back of the short-haired woman. The long-haired woman blocks the short-haired woman's attack with her sword held horizontally.\nThe camera cuts to a close-up of the short-haired woman's face, then to her back, and then back to her front. The long-haired woman suddenly ducks her head and drops her knife.\nThe camera cuts to a front view of the long-haired woman, then cuts to behind her. The long-haired woman holds her sword with both hands, raises it above her head, and swings it forward. The short-haired woman turns to the left of the scene.\nThe camera cuts to a front view of the long-haired woman, shouting ahead. The short-haired woman is on the left side of the long-haired woman.\nThe camera cuts to the back of the long-haired woman. The long-haired woman is in the center of the scene, bending forward. The short-haired woman faces left towards the camera, swings her sword to the left of the scene, then turns her head to the right to look at the long-haired woman.\nFinally, the camera cuts to a scene showing the long-haired woman from below her face.", "events": [ { "event": "At the beginning of the video, the scene shows a dimly lit corridor. The corridor is narrow, with burning flames on both sides. On the left side of the corridor, there is a woman bending over. In front of her is another woman holding a silver long sword in her right hand.", "visual_elements": [ { "content": "The camera shows a dimly lit corridor.", "type": "camera", "weight": 3 }, { "content": "The corridor is narrow, with burning flames on both sides.", "type": "scene", "weight": 2 }, { "content": "On the left side of the corridor, there is a woman bending over.", "type": "attribute", "weight": 3 }, { "content": "In front of the bending woman is another woman holding a silver long sword in her right hand.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts. A woman with long blonde hair grabs the railing with both hands, looking ahead.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A woman with long blonde hair grabs the railing with both hands.", "type": "action", "weight": 3 }, { "content": "The blonde-haired woman looks ahead.", "type": "attribute", "weight": 3 } ] }, { "event": "Then, the camera cuts to a closer perspective of the two women. 
The woman in front of the camera, with black long hair and decorations on her head, faces away from the camera. In front of her is a woman with short black hair. The short-haired woman swings her knife to the left of the scene, aiming at the long-haired woman.", "visual_elements": [ { "content": "The camera cuts to a closer perspective of the two women in the corridor.", "type": "camera", "weight": 3 }, { "content": "The woman closer to the camera faces away from it, with black long hair and decorations on her head.", "type": "attribute", "weight": 3 }, { "content": "In front of the long-haired woman is a woman with short black hair.", "type": "attribute", "weight": 3 }, { "content": "The short-haired woman swings her knife to the left of the scene, aiming at the long-haired woman.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to behind the short-haired woman. The short-haired woman faces away from the camera, wearing a black tight-fitting outfit. The long-haired woman faces the camera, dressed provocatively with metal decorations on her face. She raises her sword to block the short-haired woman's attack.", "visual_elements": [ { "content": "The camera cuts to behind the short-haired woman.", "type": "camera", "weight": 3 }, { "content": "The short-haired woman faces away from the camera, wearing a black vest.", "type": "attribute", "weight": 3 }, { "content": "The long-haired woman faces the camera, dressed provocatively with painted facial decorations.", "type": "attribute", "weight": 3 }, { "content": "The long-haired woman raises her sword to block the short-haired woman's attack.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a scene where the long-haired woman is on the right side of the scene.", "visual_elements": [ { "content": "The camera cuts to a scene where the long-haired woman is on the right side of the scene.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to a scene where the short-haired woman is on the right side of the scene, and the long-haired woman swings her sword at the short-haired woman's abdomen.", "visual_elements": [ { "content": "The camera cuts to a scene where the short-haired woman is on the right side of the scene.", "type": "camera", "weight": 3 }, { "content": "The long-haired woman swings her sword at the short-haired woman's abdomen.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a front view of the long-haired woman. The short-haired woman ducks low, dodging the long-haired woman's sword swing.", "visual_elements": [ { "content": "The camera cuts to a front view of the long-haired woman.", "type": "camera", "weight": 3 }, { "content": "The short-haired woman ducks low.", "type": "attribute", "weight": 2 }, { "content": "The short-haired woman dodges the long-haired woman's sword swing.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to the back of the long-haired woman, then to the back of the short-haired woman. 
Both women wield their knives in a stalemate, with the blades crossing in an \"X\" shape.", "visual_elements": [ { "content": "The camera cuts to the back of the long-haired woman, then to the back of the short-haired woman.", "type": "camera", "weight": 3 }, { "content": "Both women wield their knives in a stalemate.", "type": "attribute", "weight": 2 }, { "content": "The blades cross in an \"X\" shape.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a close-up of the short-haired woman's face, then to the back of the long-haired woman, and then a close-up of the short-haired woman again. The short-haired woman swings her knife, injuring the long-haired woman.", "visual_elements": [ { "content": "The camera cuts to a close-up of the short-haired woman's face, then to the back of the long-haired woman, and then a close-up of the short-haired woman again.", "type": "camera", "weight": 3 }, { "content": "The short-haired woman swings her knife, injuring the long-haired woman.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up of the short-haired woman's face. The short-haired woman elbows the long-haired woman's face with her right arm.", "visual_elements": [ { "content": "The camera cuts to a close-up of the short-haired woman's face.", "type": "camera", "weight": 3 }, { "content": "The short-haired woman elbows the long-haired woman's face with her right arm.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the back of the short-haired woman. The long-haired woman, after being elbowed, falls backward to the right of the scene.", "visual_elements": [ { "content": "The camera cuts to the back of the short-haired woman.", "type": "camera", "weight": 3 }, { "content": "The long-haired woman falls backward to the right of the scene after being elbowed.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a front view of the short-haired woman, panting.", "visual_elements": [ { "content": "The camera cuts to a front view of the short-haired woman.", "type": "camera", "weight": 3 }, { "content": "The short-haired woman is panting.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera cuts to a front view of the long-haired woman, facing right towards the camera, panting.", "visual_elements": [ { "content": "The camera cuts to a front view of the long-haired woman.", "type": "camera", "weight": 3 }, { "content": "The long-haired woman faces right towards the camera.", "type": "attribute", "weight": 3 }, { "content": "The long-haired woman is panting.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a front view of the short-haired woman, then pulls back to show her from a distance. The short-haired woman swings her knife to initiate an attack.", "visual_elements": [ { "content": "The camera cuts to a front view of the short-haired woman, then pulls back to show her from a distance.", "type": "camera", "weight": 3 }, { "content": "The short-haired woman swings her knife to initiate an attack.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the back of the long-haired woman. The long-haired woman jumps upwards.", "visual_elements": [ { "content": "The camera cuts to the back of the long-haired woman.", "type": "camera", "weight": 3 }, { "content": "The long-haired woman jumps upwards.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a front view of the short-haired woman. 
The short-haired woman swings her knife at the long-haired woman with both hands.", "visual_elements": [ { "content": "The camera cuts to a front view of the short-haired woman.", "type": "camera", "weight": 3 }, { "content": "The short-haired woman swings her knife at the long-haired woman with both hands.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the back of the short-haired woman. The long-haired woman blocks the short-haired woman's attack with her sword held horizontally.", "visual_elements": [ { "content": "The camera cuts to the back of the short-haired woman.", "type": "camera", "weight": 3 }, { "content": "The long-haired woman blocks the short-haired woman's attack with her sword held horizontally.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up of the short-haired woman's face, then to her back, and then back to her front. The long-haired woman suddenly ducks her head and drops her knife.", "visual_elements": [ { "content": "The camera cuts to a close-up of the short-haired woman's face, then to her back, and then back to her front.", "type": "camera", "weight": 3 }, { "content": "The long-haired woman suddenly ducks her head and drops her knife.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a front view of the long-haired woman, then cuts to behind her. The long-haired woman holds her sword with both hands, raises it above her head, and swings it forward. The short-haired woman turns to the left of the scene.", "visual_elements": [ { "content": "The camera cuts to a front view of the long-haired woman, then to behind her.", "type": "camera", "weight": 3 }, { "content": "The long-haired woman holds her sword with both hands.", "type": "attribute", "weight": 3 }, { "content": "The long-haired woman raises her sword above her head and swings it forward.", "type": "action", "weight": 3 }, { "content": "The short-haired woman turns to the left of the scene.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to a front view of the long-haired woman, shouting ahead. The short-haired woman is on the left side of the long-haired woman.", "visual_elements": [ { "content": "The camera cuts to a front view of the long-haired woman.", "type": "camera", "weight": 3 }, { "content": "The long-haired woman shouts ahead.", "type": "action", "weight": 3 }, { "content": "The short-haired woman is on the left side of the long-haired woman.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to the back of the long-haired woman. The long-haired woman is in the center of the scene, bending forward. 
The short-haired woman faces left towards the camera, swings her sword to the left of the scene, then turns her head to the right to look at the long-haired woman.", "visual_elements": [ { "content": "The camera cuts to the back of the long-haired woman.", "type": "camera", "weight": 3 }, { "content": "The long-haired woman is in the center of the scene, bending forward.", "type": "action", "weight": 3 }, { "content": "The short-haired woman faces left towards the camera.", "type": "attribute", "weight": 2 }, { "content": "The short-haired woman swings her sword to the left of the scene, then turns her head to the right to look at the long-haired woman.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera cuts to a scene showing the long-haired woman from below her face.", "visual_elements": [ { "content": "Finally, the camera cuts to a scene showing the long-haired woman from below her face.", "type": "camera", "weight": 3 } ] } ], "n_events": 22, "n_elements": 57 }, { "index": "TUNA_0154", "video_path": "VELOCITI/v_-yzEjTjo2IA_seg_95_105.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, two women appear in the scene. In front of them stands a tree with green leaves, while behind them, there is an abandoned car and a gray wall.\nThe camera pans to the left, and the two women start running forward.\nNext, the camera cuts to a closer perspective, capturing the two women from the side and moving downwards to the left. They run fast towards the left side of the scene, where the light is dim.\nThe camera cuts again, showing a close-up of a bald man standing. The bald man is speaking facing forward. The background is blurred, and the lighting is dim. To his left front, a man dressed in armor stands, facing the camera and looking at the bald man. Then, the bald man turns his head to his left.\nThe camera cuts to a frontal view of the two women running. Behind them on the left are some trees, and in the distance, there is an abandoned truck.\nNext, the camera cuts to a closer perspective, where the woman on the left side of the scene falls forward to the ground. The woman on the right, dressed in a black tight-fitting outfit, bends down and uses both hands to help the fallen woman up. To their right are some tall weeds and two damaged cars.\nFinally, the camera cuts to a dimly lit tunnel and moves to the right. The scene is blurred, with a group of people running towards the camera one after another. On the left side of the scene are two burning fires, and on the right side of the scene, there is one burning fire.", "events": [ { "event": "At the beginning of the video, two women appear in the scene. 
In front of them stands a tree with green leaves, while behind them, there is an abandoned car and a gray wall.", "visual_elements": [ { "content": "Two women appear in the scene.", "type": "camera", "weight": 3 }, { "content": "In front of the two women, there is a tree with green leaves, and behind them, there is an abandoned car and a gray wall.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans to the left, and the two women start running forward.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The two women start running forward.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to a closer perspective, capturing the two women from the side and moving downwards to the left. They run fast towards the left side of the scene, where the light is dim.", "visual_elements": [ { "content": "The camera cuts to a closer perspective, capturing the two women from the side and moving downwards to the left.", "type": "camera", "weight": 3 }, { "content": "The two women quickly run towards the left side of the scene.", "type": "action", "weight": 2 }, { "content": "The light on the left side of the scene is very dim.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts again, showing a close-up of a bald man standing. The bald man is speaking facing forward. The background is blurred, and the lighting is dim. To his left front, a man dressed in armor stands, facing the camera and looking at the bald man. Then, the bald man turns his head to his left.", "visual_elements": [ { "content": "The camera cuts again, showing a close-up of a bald man standing from the side.", "type": "action", "weight": 3 }, { "content": "The bald man speaks facing forward.", "type": "action", "weight": 3 }, { "content": "The background is blurred, and the lighting is dim.", "type": "attribute", "weight": 2 }, { "content": "A man dressed in armor stands, facing the camera and looking at the bald man.", "type": "attribute", "weight": 2 }, { "content": "The bald man turns his head to his left.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to a frontal view of the two women running. Behind them on the left are some trees, and in the distance, there is an abandoned truck.", "visual_elements": [ { "content": "The camera cuts, capturing the two women running from the front.", "type": "camera", "weight": 3 }, { "content": "Behind the two women on the left are some trees, and in the distance, there is an abandoned truck.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera cuts to a closer perspective, where the woman on the left side of the scene falls forward to the ground. The woman on the right, dressed in a black tight-fitting outfit, bends down and uses both hands to help the fallen woman up. To their right are some tall weeds and two damaged cars.", "visual_elements": [ { "content": "The camera cuts to a closer perspective.", "type": "camera", "weight": 3 }, { "content": "The woman on the left side of the scene falls forward to the ground.", "type": "action", "weight": 3 }, { "content": "The woman on the right, dressed in a black tight-fitting outfit, bends down and uses both hands to help the fallen woman up.", "type": "action", "weight": 3 }, { "content": "To the right of the two women are some tall weeds and two damaged cars.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera cuts to a dimly lit tunnel and moves to the right. 
The scene is blurred, with a group of people running towards the camera one after another. On the left side of the scene are two burning fires, and on the right side of the scene, there is one burning fire.", "visual_elements": [ { "content": "The camera cuts to a dimly lit tunnel and moves to the right.", "type": "camera", "weight": 3 }, { "content": "The scene is blurred.", "type": "attribute", "weight": 2 }, { "content": "A group of people run towards the camera, gradually approaching the camera.", "type": "action", "weight": 3 }, { "content": "On the left side of the scene are two burning fires, and on the right side, there is one burning fire.", "type": "scene", "weight": 2 } ] } ], "n_events": 7, "n_elements": 22 }, { "index": "TUNA_0155", "video_path": "VELOCITI/v_09iu9EiAtKA_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a coffee shop counter. In the background, there is a shelf with bottles and cans arranged on it. There are blue and purple curtains on both sides of the scene. A man wearing a black T-shirt, white apron, and black pants stands behind the counter, his right hand on his hip, looking towards his right front.\nThe camera pans slightly down and to the right. On the right side of the scene, a person walks forward with their back to the camera.\nThe camera cuts to a frontal view of the man and pans to the left. The man behind the counter turns his head to the left, looking ahead. A man in a black T-shirt stands in front of the counter with his back to the camera, pointing his left hand at the man behind the counter while speaking. He then moves to the right side of the scene and disappears from the scene. The man behind the counter turns his head to the left, following the man in front with his gaze. On the counter, there are items such as glass bottles with flowers, glass jars containing dark-colored food, and more.\nThe camera pans to the left. The man behind the counter turns to the left side of the scene and walks forward with his back to the camera.\nThe camera cuts to a new scene, showing a woman in a black T-shirt entering a room from outside and closing the door behind her. In the foreground, a man in a white coat lowers his head with his arms swinging.", "events": [ { "event": "At the beginning of the video, the camera focuses on a coffee shop counter. In the background, there is a shelf with bottles and cans arranged on it. There are blue and purple curtains on both sides of the scene. A man wearing a black T-shirt, white apron, and black pants stands behind the counter, his right hand on his hip, looking towards his right front.", "visual_elements": [ { "content": "The camera focuses on the counter of a shop.", "type": "camera", "weight": 3 }, { "content": "In the background, there is a shelf with bottles and cans arranged on it, and there are blue and purple curtains on both sides of the scene.", "type": "scene", "weight": 2 }, { "content": "A man dressed in a black T-shirt, white apron, and black pants stands behind the counter.", "type": "attribute", "weight": 3 }, { "content": "With his right hand on his hip, the man looks towards his right front.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera pans slightly down and to the right. 
On the right side of the scene, a person walks forward with their back to the camera.", "visual_elements": [ { "content": "The camera pans slightly down and to the right, revealing a person walking forward with their back to the camera on the right side of the scene.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to a frontal view of the man and pans to the left. The man behind the counter turns his head to the left, looking ahead. A man in a black T-shirt stands in front of the counter with his back to the camera, pointing his left hand at the man behind the counter while speaking. He then moves to the right side of the scene and disappears from the scene. The man behind the counter turns his head to the left, following the man in front with his gaze. On the counter, there are items such as glass bottles with flowers, glass jars containing dark-colored food, and more.", "visual_elements": [ { "content": "The camera cuts to a frontal view of the man and pans to the left.", "type": "camera", "weight": 3 }, { "content": "The man behind the counter turns his head to the left and looks ahead.", "type": "action", "weight": 3 }, { "content": "A man in a black T-shirt stands in front of the counter with his back to the camera, pointing his left hand at the man behind the counter while speaking.", "type": "action", "weight": 3 }, { "content": "The man in front of the counter then moves to the right side of the scene and disappears from the scene.", "type": "action", "weight": 2 }, { "content": "On the counter, there are items such as glass bottles with flowers, glass jars containing dark-colored food, and more.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans to the left. The man behind the counter turns to the left side of the scene and walks forward with his back to the camera.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The man behind the counter turns to the left side of the scene and walks forward with his back to the camera.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a new scene, showing a woman in a black T-shirt entering a room from outside and closing the door behind her. In the foreground, a man in a white coat lowers his head with his arms swinging.", "visual_elements": [ { "content": "The camera cuts to a new scene.", "type": "camera", "weight": 3 }, { "content": "A woman in a black T-shirt enters the room from outside, closes the door behind her, and walks in.", "type": "action", "weight": 3 }, { "content": "In the foreground, a man in a white coat lowers his head with his arms swinging.", "type": "action", "weight": 2 } ] } ], "n_events": 5, "n_elements": 15 }, { "index": "TUNA_0156", "video_path": "VELOCITI/v_0ShWGyC408I_seg_65_75.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the backdrop of an interior room. A tall young man wearing a black jacket stands on the left side of the scene, with a blonde woman in a red coat to his left. Both face a young man in a gray top. The camera, filming from behind and to the left of the gray-topped man, shows the upper torsos of the three individuals. The camera shakes and moves backward, and the tall man walks forward while talking. As he moves forward, the woman looks at the gray-topped man, waves her right hand forward, and speaks. 
The gray-topped man first looks at the woman and then turns his head to look at the tall man.\nThe camera cuts to a frontal view of the gray-topped man and pans down to the right, showing him pushing the tall man forward with his hands.\nThe camera cuts to behind the gray-topped man and moves forward. The gray-topped man grabs the tall man's collar from the front with both hands and pushes him forward until he collides with a refrigerator on the left side of the scene. The camera pans to the right. Subsequently, the gray-topped man drags the tall man to the right side of the scene. The tall man's head hits objects along the wall. The camera pans down to the left, and the gray-topped man pulls the tall man to the ground.\nThe camera then cuts to a lower perspective near the ground and moves upward. The gray-topped man pins the tall man down and punches his face with his right hand. Behind the two men, the woman quickly walks towards the camera.\nThe camera cuts to a close-up of a blonde little girl who covers her ears with both hands, glances to her right, and then lowers her head and closes her eyes.\nFinally, the camera cuts to the back of the woman, who bends down and reaches forward with both hands to grab the back of the gray-topped man's shirt.", "events": [ { "event": "The video begins with the backdrop of an interior room. A tall young man wearing a black jacket stands on the left side of the scene, with a blonde woman in a red coat to his left. Both face a young man in a gray top. The camera, filming from behind and to the left of the gray-topped man, shows the upper torsos of the three individuals. The camera shakes and moves backward, and the tall man walks forward while talking. As he moves forward, the woman looks at the gray-topped man, waves her right hand forward, and speaks. The gray-topped man first looks at the woman and then turns his head to look at the tall man.", "visual_elements": [ { "content": "The backdrop is the interior of a room.", "type": "scene", "weight": 2 }, { "content": "A tall young man wearing a black jacket stands on the left side of the scene.", "type": "attribute", "weight": 3 }, { "content": "To the left of the tall man is a blonde woman in a red coat.", "type": "attribute", "weight": 3 }, { "content": "Both face a young man in a gray shirt.", "type": "attribute", "weight": 3 }, { "content": "The camera captures the upper torsos of the three persons from behind and to the left of the gray-shirted man.", "type": "camera", "weight": 3 }, { "content": "The camera shakes and moves backward.", "type": "camera", "weight": 3 }, { "content": "The tall man walks forward while speaking.", "type": "action", "weight": 3 }, { "content": "The woman looks at the gray-shirted man, waves her right hand forward, and speaks.", "type": "action", "weight": 2 }, { "content": "The gray-shirted man first looks at the woman and then turns to look at the tall man.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a frontal view of the gray-topped man and pans down to the right, showing him pushing the tall man forward with his hands.", "visual_elements": [ { "content": "The camera cuts to a frontal view of the gray-shirted man and moves down to the right.", "type": "camera", "weight": 3 }, { "content": "The gray-shirted man pushes the tall man forward with his hands.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to behind the gray-topped man and moves forward. 
The gray-topped man grabs the tall man's collar from the front with both hands and pushes him forward until he collides with a refrigerator on the left side of the scene. The camera pans to the right. Subsequently, the gray-topped man drags the tall man to the right side of the scene. The tall man's head hits objects along the wall. The camera pans down to the left, and the gray-topped man pulls the tall man to the ground.", "visual_elements": [ { "content": "The camera cuts to behind the gray-shirted man and moves forward.", "type": "camera", "weight": 3 }, { "content": "The gray-shirted man grabs the tall man's collar from the front with both hands and pushes him forward.", "type": "action", "weight": 3 }, { "content": "The tall man collides with a refrigerator on the left side of the scene.", "type": "action", "weight": 2 }, { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "The gray-shirted man drags the tall man to the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The tall man's head hits objects along the wall.", "type": "action", "weight": 2 }, { "content": "The camera pans down to the left.", "type": "camera", "weight": 3 }, { "content": "The gray-shirted man pulls the tall man to the ground.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to a lower perspective near the ground and moves upward. The gray-topped man pins the tall man down and punches his face with his right hand. Behind the two men, the woman quickly walks towards the camera.", "visual_elements": [ { "content": "The camera cuts to a lower perspective near the ground and moves upward.", "type": "camera", "weight": 3 }, { "content": "The gray-shirted man pins the tall man down and punches his face with his right hand.", "type": "action", "weight": 3 }, { "content": "Behind the two men, the woman quickly walks towards the camera.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to a close-up of a blonde little girl who covers her ears with both hands, glances to her right, and then lowers her head and closes her eyes.", "visual_elements": [ { "content": "The camera cuts to a close-up of a blonde little girl, showing her covering her ears with both hands.", "type": "camera", "weight": 3 }, { "content": "The little girl glances to her right and then lows her head, closing her eyes.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts to the back of the woman, who bends down and reaches forward with both hands to grab the back of the gray-topped man's shirt.", "visual_elements": [ { "content": "The camera finally cuts to the back of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman bends down and reaches forward with both hands to grab the back of the gray-shirted man's shirt.", "type": "action", "weight": 3 } ] } ], "n_events": 6, "n_elements": 26 }, { "index": "TUNA_0157", "video_path": "VELOCITI/v_0ShWGyC408I_seg_85_95.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the front of a young man in a gray shirt and pans down to the left, where the gray-shirted man tackles another person to the ground. The backdrop is the interior of a room, with an old-fashioned television on the left side of the scene.\nThe camera cuts to the back of the gray-shirted man and shakes. 
The gray-shirted man pins down a young man lying face-up on the ground. The fallen man looks distressed, turning his head to the right.\nThe camera cuts to a blonde little girl leaning against the wall with her hands covering her ears.\nThe camera cuts to a blonde young woman in a red shirt, filmed from the front. She leans forward, supporting herself on the table with her left hand, and shouts loudly, looking deeply sad.\nThe camera cuts to the fallen man, filmed from above the right side of the man. He wears a dark coat. The camera pans up slightly, and the dark-coated man lifts his head to look forward. Suddenly, the gray-shirted man swings his right fist, hitting the left face of the fallen dark-coated man, who falls again.\nThe camera cuts to the upper torso of the woman, showing her body leaning forward.\nThe camera cuts, filming the gray-shirted man from the front. The woman pulls him backward with both hands, and he rises while striking the dark-coated man's face again with his right hand.\nThe camera cuts to show the gray-shirted man falling backward, and the woman behind him also falls to the ground.\nThe camera cuts to the dark-coated man, filmed from above and to the right. He turns his face to his right, with his right hand on his left cheek. He drops his hand.\nFinally, the camera cuts to a ground-level angle, filming the dark-coated man from behind to show his movement as he rises. In front of him, the gray-shirted man sits on the ground, watching him.", "events": [ { "event": "At the beginning of the video, the camera focuses on the front of a young man in a gray shirt and pans down to the left, where the gray-shirted man tackles another person to the ground. The backdrop is the interior of a room, with an old-fashioned television on the left side of the scene.", "visual_elements": [ { "content": "The camera focuses on the front of a young man in a gray shirt and pans down to the left.", "type": "camera", "weight": 3 }, { "content": "The gray-shirted man tackles another person to the ground.", "type": "action", "weight": 3 }, { "content": "The backdrop is the interior of a room, with an old-fashioned television on the left side of the scene.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to the back of the gray-shirted man and shakes. The gray-shirted man pins down a young man lying face-up on the ground. The fallen man looks distressed, turning his head to the right.", "visual_elements": [ { "content": "The camera cuts to the back of the gray-shirted man and shakes.", "type": "camera", "weight": 3 }, { "content": "The gray-shirted man pins down a young man lying face-up on the ground.", "type": "action", "weight": 3 }, { "content": "The fallen man looks distressed.", "type": "attribute", "weight": 3 }, { "content": "The fallen man turns his head to the right.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to a blonde little girl leaning against the wall with her hands covering her ears.", "visual_elements": [ { "content": "The camera cuts to a blonde little girl.", "type": "camera", "weight": 3 }, { "content": "The little girl leans against the wall with her hands covering her ears.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to a blonde young woman in a red shirt, filmed from the front. 
She leans forward, supporting herself on the table with her left hand, and shouts loudly, looking deeply sad.", "visual_elements": [ { "content": "The camera cuts to a blonde young woman in a red shirt, filmed from the front.", "type": "camera", "weight": 3 }, { "content": "The woman leans on the table with her left hand.", "type": "attribute", "weight": 3 }, { "content": "The woman leans forward and shouts loudly.", "type": "action", "weight": 3 }, { "content": "The woman looks deeply sad.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to the fallen man, filmed from above the right side of the man. He wears a dark coat. The camera pans up slightly, and the dark-coated man lifts his head to look forward. Suddenly, the gray-shirted man swings his right fist, hitting the left face of the fallen dark-coated man, who falls again.", "visual_elements": [ { "content": "The camera cuts to the fallen man, filmed from above and to the right.", "type": "camera", "weight": 3 }, { "content": "The fallen man wears a dark coat.", "type": "attribute", "weight": 2 }, { "content": "The camera pans up slightly.", "type": "camera", "weight": 3 }, { "content": "The dark-coated man lifts his head and looks forward.", "type": "action", "weight": 3 }, { "content": "Suddenly, the gray-shirted man swings his right fist, hitting the left face of the fallen dark-coated man.", "type": "action", "weight": 3 }, { "content": "The dark-coated man falls again.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the upper torso of the woman, showing her body leaning forward.", "visual_elements": [ { "content": "The camera cuts to the upper torso of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman leans forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts, filming the gray-shirted man from the front. The woman pulls him backward with both hands, and he rises while striking the dark-coated man's face again with his right hand.", "visual_elements": [ { "content": "The camera cuts, filming the gray-shirted man from the front.", "type": "camera", "weight": 3 }, { "content": "The woman pulls the gray-shirted man backward with both hands.", "type": "action", "weight": 2 }, { "content": "The gray-shirted man rises while striking the dark-coated man's face again with his right hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to show the gray-shirted man falling backward, and the woman behind him also falls to the ground.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The gray-shirted man falls backward.", "type": "action", "weight": 3 }, { "content": "The woman behind him also falls to the ground.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the dark-coated man, filmed from above and to the right. He turns his face to his right, with his right hand on his left cheek. He drops his hand.", "visual_elements": [ { "content": "The camera cuts to the dark-coated man, filmed from above and to the right.", "type": "camera", "weight": 3 }, { "content": "The dark-coated man turns his face to his right, with his right hand on his left cheek.", "type": "attribute", "weight": 3 }, { "content": "The dark-coated man drops his hand.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera cuts to a ground-level angle, filming the dark-coated man from behind to show his movement as he rises.
In front of him, the gray-shirted man sits on the ground, watching him.", "visual_elements": [ { "content": "Finally, the camera cuts to a ground-level angle, filming the dark-coated man from behind to show his movement as he rises.", "type": "camera", "weight": 3 }, { "content": "In front of the dark-coated man, the gray-shirted man sits on the ground, watching him.", "type": "attribute", "weight": 2 } ] } ], "n_events": 10, "n_elements": 32 }, { "index": "TUNA_0158", "video_path": "VELOCITI/v_0mmSi-63Y9U_seg_15_25.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a young man, filmed from his right side. The man wears a light green T-shirt and sits on a toilet. The backdrop is a bathroom with light yellow tiled walls. There is also a sink on the left side of the scene. The man leans slightly forward, lifting and then lowering his hands while speaking.\nThe camera cuts. A woman lies in a bathtub filled with lots of white bubbles. The woman has long black hair and looks up to her right before starting to speak.\nFinally, the camera cuts back to the man. He touches the skin next to his eyes with both hands, then clenches his fists and lets them go. The man arches his chest, then shrugs his shoulders before letting them drop.", "events": [ { "event": "At the beginning of the video, the camera focuses on a young man, filmed from his right side. The man wears a light green T-shirt and sits on a toilet. The backdrop is a bathroom with light yellow tiled walls. There is also a sink on the left side of the scene. The man leans slightly forward, lifting and then lowering his hands while speaking.", "visual_elements": [ { "content": "The camera focuses on a young man, filmed from his right side.", "type": "camera", "weight": 3 }, { "content": "The man wears a light green T-shirt and sits on a toilet.", "type": "attribute", "weight": 3 }, { "content": "The backdrop is a bathroom.", "type": "scene", "weight": 2 }, { "content": "The bathroom is decorated with light yellow tiled walls.", "type": "scene", "weight": 2 }, { "content": "There is also a sink on the left side of the scene.", "type": "scene", "weight": 1 }, { "content": "The man leans slightly forward.", "type": "attribute", "weight": 2 }, { "content": "He lifts and then lowers his hands while speaking.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts. A woman lies in a bathtub filled with lots of white bubbles. The woman has long black hair and looks up to her right before starting to speak.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The woman lies in a bathtub filled with white bubbles.", "type": "attribute", "weight": 3 }, { "content": "The woman has long black hair.", "type": "attribute", "weight": 3 }, { "content": "The woman looks up to her right and starts to speak.", "type": "attribute", "weight": 3 } ] }, { "event": "Finally, the camera cuts back to the man. He touches the skin next to his eyes with both hands, then clenches his fists and lets them go. 
The man arches his chest, then shrugs his shoulders before letting them drop.", "visual_elements": [ { "content": "The camera cuts back to the man.", "type": "camera", "weight": 3 }, { "content": "The man touches the skin next to his eyes with both hands.", "type": "action", "weight": 3 }, { "content": "The man clenches his fists and then lets them go.", "type": "action", "weight": 3 }, { "content": "The man arches his chest, shrugs his shoulders, and then lets them drop.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0159", "video_path": "VELOCITI/v_1CDlBLvc3YE_seg_25_35.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera captures an upward view of a staircase. A middle-aged man dressed in a brown suit with a striped tie is slowly walking up the stairs, holding the railing with his left hand and his right hand hanging naturally. He then lets go of the railing. The staircase is dimly lit.\nThe camera cuts to a revolving door, with a \"Flower Shop\" sign above the wall on the right side. Two men and a woman are inside the revolving door, facing away from the camera. Behind the revolving door, a man walks forward facing away from the camera. He then reaches out his right hand and takes something from the top of the revolving door. The revolving door stops, and a man dressed in a white suit is trapped inside, turning to his left. In the background, a black car is parked outside the revolving door, and a man wearing a hat opens the rear door of the car. A hand sticks out of the front window of the car, reaching upwards.\nThe camera cuts to a front view of an elderly man dressed in a white suit with a black bow tie. A flower is pinned to the left side of his suit, and he looks worried and tense. A hand holding a black handgun enters from the left side of the scene.\nThe camera cuts to a front view of the gunman, who is a middle-aged man dressed in a brown suit.\nThe camera cuts back to the man in the white suit. He leans forward and shouts in terror. With his right hand, he slaps the door. The gunman in front of him fires a shot, causing cracks to appear in part of the glass on the revolving door. The man in the white suit leans backward while shouting, and the gunman fires a second shot, causing blood to spurt from the left chest of the man in the white suit.\nFinally, the camera cuts back to the man in the brown suit, who looks very menacing.", "events": [ { "event": "At the beginning of the video, the camera captures an upward view of a staircase. A middle-aged man dressed in a brown suit with a striped tie is slowly walking up the stairs, holding the railing with his left hand and his right hand hanging naturally. He then lets go of the railing.
The staircase is dimly lit.", "visual_elements": [ { "content": "The camera captures a downward view of an upward staircase.", "type": "camera", "weight": 3 }, { "content": "A middle-aged man dressed in a brown suit with a striped tie holds the stair railing with his left hand, while his right hand hangs naturally at his side.", "type": "attribute", "weight": 2 }, { "content": "The man in the brown suit is slowly walking up the stairs.", "type": "action", "weight": 3 }, { "content": "Then, the left hand of the man in the brown suit leaves the stair railing.", "type": "action", "weight": 2 }, { "content": "The staircase is dimly lit.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a revolving door, with a \"Flower Shop\" sign above the wall on the right side. Two men and a woman are inside the revolving door, facing away from the camera. Behind the revolving door, a man walks forward facing away from the camera. He then reaches out his right hand and takes something from the top of the revolving door. The revolving door stops, and a man dressed in a white suit is trapped inside, turning to his left. In the background, a black car is parked outside the revolving door, and a man wearing a hat opens the rear door of the car. A hand sticks out of the front window of the car, reaching upwards.", "visual_elements": [ { "content": "The camera cuts to focus on a revolving door.", "type": "camera", "weight": 3 }, { "content": "Above the wall on the right side of the door is a \"Flower Shop\" sign.", "type": "attribute", "weight": 2 }, { "content": "Two men and a woman are inside the revolving door, facing away from the camera.", "type": "attribute", "weight": 2 }, { "content": "Behind the revolving door, a man walks forward facing away from the camera.", "type": "action", "weight": 3 }, { "content": "The man then reaches out his right hand and takes something from the top of the revolving door.", "type": "action", "weight": 3 }, { "content": "The revolving door stops, trapping a man dressed in a white suit inside.", "type": "attribute", "weight": 3 }, { "content": "The man in the white suit then turns to the left side of the scene.", "type": "action", "weight": 3 }, { "content": "Outside the revolving door, a black car is parked.", "type": "scene", "weight": 2 }, { "content": "A man wearing a hat opens the rear door of the car.", "type": "scene", "weight": 1 }, { "content": "A hand sticks out of the front window of the car, reaching upwards.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts to a front view of an elderly man dressed in a white suit with a black bow tie. A flower is pinned to the left side of his suit, and he looks worried and tense. 
A hand holding a black handgun enters from the left side of the scene.", "visual_elements": [ { "content": "The camera cuts to a front view of an elderly man.", "type": "camera", "weight": 3 }, { "content": "The elderly man wears a white suit with a black bow tie, and a flower pinned to the left side of his suit.", "type": "attribute", "weight": 3 }, { "content": "The man in the white suit looks worried and tense.", "type": "attribute", "weight": 3 }, { "content": "In front of the man in the white suit, a hand holding a black handgun enters from the left side of the scene.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a front view of the gunman, who is a middle-aged man dressed in a brown suit.", "visual_elements": [ { "content": "The camera cuts to a front view of the gunman.", "type": "camera", "weight": 3 }, { "content": "The gunman is a middle-aged man dressed in a brown suit.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts back to the man in the white suit. He leans forward and shouts in terror. With his right hand, he slaps the door. The gunman in front of him fires a shot, causing cracks to appear in part of the glass on the revolving door. The man in the white suit leans backward while shouting, and the gunman fires a second shot, causing blood to spurt from the left chest of the man in the white suit.", "visual_elements": [ { "content": "The camera cuts back to the man in the white suit.", "type": "camera", "weight": 3 }, { "content": "The man in the white suit leans forward and shouts in terror.", "type": "action", "weight": 3 }, { "content": "With his right hand, the man in the white suit slaps the door.", "type": "action", "weight": 2 }, { "content": "The gunman in front of him fires a shot.", "type": "action", "weight": 3 }, { "content": "Cracks appear in part of the glass on the revolving door.", "type": "attribute", "weight": 3 }, { "content": "The man in the white suit leans backward while shouting.", "type": "action", "weight": 3 }, { "content": "Then, the gunman fires a second shot.", "type": "action", "weight": 3 }, { "content": "Blood spurts from the left chest of the man in the white suit.", "type": "attribute", "weight": 3 } ] }, { "event": "Finally, the camera cuts back to the man in the brown suit, who looks very menacing.", "visual_elements": [ { "content": "The camera finally cuts to the man in the brown suit.", "type": "camera", "weight": 3 }, { "content": "The man in the brown suit looks very menacing.", "type": "attribute", "weight": 3 } ] } ], "n_events": 6, "n_elements": 31 }, { "index": "TUNA_0160", "video_path": "VELOCITI/v_1Ltz-vQPqgo_seg_95_105.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a topless man standing in a dimly lit room. The background is blurred, with a man and two women standing at the doorway. The topless man turns to his left to look at the three people. The background becomes clear. To the right front of the topless man, on the far left of the scene, is a middle-aged, overweight man dressed in a gray suit with a striped tie, holding something in his left hand. To the left of the gray-suited man is a blonde woman wearing a white top, and on the far right of the scene is a woman dressed in a white lab coat. The door behind them closes.
Upon seeing the topless man, the woman in the white lab coat steps to her right, then lowers her head, stretching her left hand up to try to cover her face.\nThe camera cuts to a front view of the topless man, showing his upper torso. In the background, several bodies wrapped in plastic are placed on metal beds. The man speaks facing the camera, glances down, then quickly ducks and retreats. The camera pans to the right.\nThe camera cuts again and moves slightly to the right, revealing the man completely naked. Covering his private area, he bends forward and retreats while glancing around.\nThe camera cuts to a view from the right rear of the naked man. He turns his head to his right and sees an empty metal bed with white sheets and plastic coverings.\nThen the camera cuts to a view from the left front of the naked man. He turns his head to the left and excitedly speaks towards the front.", "events": [ { "event": "At the beginning of the video, the camera focuses on a topless man standing in a dimly lit room. The background is blurred, with a man and two women standing at the doorway. The topless man turns to his left to look at the three people. The background becomes clear. To the right front of the topless man, on the far left of the scene, is a middle-aged, overweight man dressed in a gray suit with a striped tie, holding something in his left hand. To the left of the gray-suited man is a blonde woman wearing a white top, and on the far right of the scene is a woman dressed in a white lab coat. The door behind them closes. Upon seeing the topless man, the woman in the white lab coat steps to her right, then lowers her head, stretching her left hand up to try to cover her face.", "visual_elements": [ { "content": "The camera focuses on a topless man.", "type": "camera", "weight": 3 }, { "content": "The man stands in a dimly lit room.", "type": "attribute", "weight": 3 }, { "content": "The background is blurred, with a man and two women standing at the doorway.", "type": "scene", "weight": 2 }, { "content": "The topless man turns to his left to look at the three people.", "type": "action", "weight": 3 }, { "content": "The background becomes clear.", "type": "attribute", "weight": 2 }, { "content": "At the doorway, on the far left, is a middle-aged, overweight man dressed in a gray suit with a striped tie.", "type": "attribute", "weight": 3 }, { "content": "The gray-suited man holds a folder in his left hand.", "type": "attribute", "weight": 3 }, { "content": "To the left of the gray-suited man is a blonde woman wearing a white top.", "type": "attribute", "weight": 3 }, { "content": "On the far right of the scene is a woman dressed in a white lab coat.", "type": "attribute", "weight": 3 }, { "content": "The door behind them closes.", "type": "scene", "weight": 2 }, { "content": "Upon seeing the topless man, the woman in the white lab coat steps to her right, then lowers her head, stretching her left hand up to try to cover her face.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a front view of the topless man, showing his upper torso. In the background, several bodies wrapped in plastic are placed on metal beds. The man speaks facing the camera, glances down, then quickly ducks and retreats. 
The camera pans to the right.", "visual_elements": [ { "content": "The camera cuts to a front view of the topless man, showing his upper torso.", "type": "camera", "weight": 3 }, { "content": "In the background, several bodies wrapped in plastic are placed on metal beds.", "type": "scene", "weight": 2 }, { "content": "The topless man speaks facing the camera, glances down, then quickly ducks and retreats.", "type": "action", "weight": 3 }, { "content": "The camera pans to the right.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts again and moves slightly to the right, revealing the man completely naked. Covering his private area, he bends forward and retreats while glancing around.", "visual_elements": [ { "content": "The camera cuts again and moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The man is naked from the waist up and also has no clothes on his lower body.", "type": "attribute", "weight": 3 }, { "content": "Covering his private area, he bends forward and retreats while glancing around.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a view from the right rear of the naked man. He turns his head to his right and sees an empty metal bed with white sheets and plastic coverings.", "visual_elements": [ { "content": "The camera cuts to a view from the right rear of the naked man.", "type": "camera", "weight": 3 }, { "content": "The naked man turns his head to his right.", "type": "action", "weight": 3 }, { "content": "To the right of the naked man is an empty metal bed with white sheets and plastic coverings.", "type": "attribute", "weight": 2 } ] }, { "event": "Then the camera cuts to a view from the left front of the naked man. He turns his head to the left and excitedly speaks towards the front.", "visual_elements": [ { "content": "The camera cuts to a view from the left front of the naked man.", "type": "camera", "weight": 3 }, { "content": "The naked man turns his head to the left and excitedly speaks towards the front.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 23 }, { "index": "TUNA_0161", "video_path": "VELOCITI/v_1YrG1iLwEWk_seg_5_15.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the front of a young woman with long hair. She wears a brown short-sleeved shirt and stands next to a public phone booth, holding the phone receiver in her right hand. A child stands on her right side, facing away from the camera. The camera pans backward as the woman walks forward and speaks. The child walks to the left of the scene and disappears from the scene. The background is blurred, indicating it's nighttime.\nThe camera cuts to a front view of a middle-aged man. He wears a dark coat and sits in the driver's seat of a car. He holds a mobile phone in his left hand and grips the steering wheel with his right, looking ahead while speaking.\nThe camera cuts back to the woman, who is talking on the phone receiver next to her ear.\nThe camera cuts back to the man driving, filmed from a lower right perspective of the man. The man speaks excitedly.", "events": [ { "event": "At the beginning of the video, the camera focuses on the front of a young woman with long hair. She wears a brown short-sleeved shirt and stands next to a public phone booth, holding the phone receiver in her right hand. 
A child stands on her right side, facing away from the camera. The camera pans backward as the woman walks forward and speaks. The child walks to the left of the scene and disappears from the scene. The background is blurred, indicating it's nighttime.", "visual_elements": [ { "content": "The camera focuses on the front of a young woman with long hair wearing a brown short-sleeved shirt.", "type": "camera", "weight": 3 }, { "content": "The woman stands next to a public phone booth, holding the phone receiver in her right hand.", "type": "attribute", "weight": 3 }, { "content": "A child stands on her right side, facing away from the camera.", "type": "attribute", "weight": 2 }, { "content": "The camera pans backward.", "type": "camera", "weight": 3 }, { "content": "The woman walks forward and speaks.", "type": "action", "weight": 3 }, { "content": "The child walks to the left of the scene and then disappears from the scene.", "type": "action", "weight": 2 }, { "content": "The background is blurred, indicating it's nighttime.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a front view of a middle-aged man. He wears a dark coat and sits in the driver's seat of a car. He holds a mobile phone in his left hand and grips the steering wheel with his right, looking ahead while speaking.", "visual_elements": [ { "content": "The camera cuts to a front view of a middle-aged man.", "type": "camera", "weight": 3 }, { "content": "The man wears a dark coat and sits in the driver's seat of a car.", "type": "attribute", "weight": 3 }, { "content": "He holds a mobile phone in his left hand and grips the steering wheel with his right, looking ahead.", "type": "attribute", "weight": 3 }, { "content": "The man is speaking.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the woman, who is talking on the phone receiver next to her ear.", "visual_elements": [ { "content": "The camera cuts back to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman talks on the phone receiver next to her ear.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the man driving, filmed from a lower right perspective of the man. The man speaks excitedly.", "visual_elements": [ { "content": "The camera cuts back to the man driving, filmed from a lower right perspective of the man.", "type": "camera", "weight": 3 }, { "content": "The man speaks excitedly.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 15 }, { "index": "TUNA_0162", "video_path": "VELOCITI/v_1YrG1iLwEWk_seg_60_70.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a young woman with long hair and then shakes to the left. She wears a brown short-sleeved shirt and looks to her right. The background is blurred and dimly lit, with a red car parked behind the woman on her right.\nNext, the camera cuts. On the right side of the scene, a silver car and the red car are parked by the roadside. On the right side of the scene, two men wearing bucket hats look to the right of the scene. The camera shakes, and the silver car starts moving forward.\nThe camera cuts to behind the woman and then zooms in on her expression. The woman quickly turns her head backward. 
In front of her is a public phone.\nSubsequently, the camera cuts to a shot of the woman from a distance, capturing her front view. The woman runs forward in terror, and the camera pans backward and then to the right. In the background, there is a grocery store with transparent windows, filled with various items. The woman shouts out a sentence.\nThe camera cuts to the rear of a car with its tail lights on, driving forward away from the camera. Inside the rear window, the faces of two people can be seen. The camera shakes and pans to the left as the car passes, causing many leaves on the road to fly up.\nThe camera cuts again, showing the woman running forward. The background is a gas station with multiple cars parked. Inside the gas station, there is a 24-hour convenience store. In the background, several people look at the woman.", "events": [ { "event": "At the beginning of the video, the camera focuses on a young woman with long hair and then shakes to the left. She wears a brown short-sleeved shirt and looks to her right. The background is blurred and dimly lit, with a red car parked behind the woman on her right.", "visual_elements": [ { "content": "The camera focuses on a young woman with long hair and then shakes slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The woman wears a brown short-sleeved shirt and looks to her right.", "type": "attribute", "weight": 3 }, { "content": "The background is blurred and dimly lit.", "type": "scene", "weight": 2 }, { "content": "A red car is parked behind the woman on her right.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera cuts. On the right side of the scene, a silver car and the red car are parked by the roadside. On the right side of the scene, two men wearing bucket hats look to the right of the scene. The camera shakes, and the silver car starts moving forward.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "On the right side of the scene, a silver car and a red car are parked roadside, one in front of the other.", "type": "scene", "weight": 2 }, { "content": "On the right side of the scene, two men wearing bucket hats look to the right of the scene.", "type": "attribute", "weight": 3 }, { "content": "The camera shakes.", "type": "camera", "weight": 3 }, { "content": "The silver car starts moving forward.", "type": "action", "weight": 1 } ] }, { "event": "The camera cuts to behind the woman and then zooms in on her expression. The woman quickly turns her head backward. In front of her is a public phone.", "visual_elements": [ { "content": "The camera cuts to behind the woman and then zooms in for a close-up of her expression.", "type": "camera", "weight": 3 }, { "content": "The woman quickly turns her head backward.", "type": "action", "weight": 3 }, { "content": "In front of the woman is a public phone.", "type": "attribute", "weight": 2 } ] }, { "event": "Subsequently, the camera cuts to a shot of the woman from a distance, capturing her front view. The woman runs forward in terror, and the camera pans backward and then to the right. In the background, there is a grocery store with transparent windows, filled with various items. 
The woman shouts out a sentence.", "visual_elements": [ { "content": "The camera cuts to a shot of the woman from a distance, capturing her front view.", "type": "camera", "weight": 3 }, { "content": "The woman runs forward in terror.", "type": "action", "weight": 3 }, { "content": "The camera pans backward and then to the right.", "type": "camera", "weight": 3 }, { "content": "In the background, there is a grocery store with transparent windows, filled with various items.", "type": "scene", "weight": 2 }, { "content": "The woman shouts out a sentence.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to the rear of a car with its tail lights on, driving forward away from the camera. Inside the rear window, the faces of two people can be seen. The camera shakes and pans to the left as the car passes, causing many leaves on the road to fly up.", "visual_elements": [ { "content": "The camera cuts to the rear of a car with its tail lights on.", "type": "camera", "weight": 3 }, { "content": "The car with tail lights on drives forward away from the camera.", "type": "action", "weight": 3 }, { "content": "Inside the rear window of the car, the faces of two people can be seen.", "type": "attribute", "weight": 2 }, { "content": "The camera shakes and pans to the left.", "type": "camera", "weight": 3 }, { "content": "As the car passes, many leaves on the road fly up.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts again, showing the woman running forward. The background is a gas station with multiple cars parked. Inside the gas station, there is a 24-hour convenience store. In the background, several people look at the woman.", "visual_elements": [ { "content": "The camera finally cuts back to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman runs forward.", "type": "action", "weight": 2 }, { "content": "The background is a gas station with multiple cars parked.", "type": "scene", "weight": 2 }, { "content": "Inside the gas station, there is a 24-hour convenience store.", "type": "scene", "weight": 2 }, { "content": "In the background, several people look at the woman.", "type": "scene", "weight": 2 } ] } ], "n_events": 6, "n_elements": 27 }, { "index": "TUNA_0163", "video_path": "VELOCITI/v_1cCEE8-jhus_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "As the video begins, the camera focuses on a kitchen scene. Inside the kitchen, there are white cabinets, a stove, and a refrigerator. In the foreground, a middle-aged man dressed in a dark coat sits at a dark dining table, with his hands resting on a colorful checkered tablecloth laid on the table. A round dish piled with condiments and cups sits in the middle of the table. In the background, an elderly woman with curly hair, dressed in a light pink coat and gray pants, stands in front of the refrigerator. Her right hand grips the refrigerator handle, while her left hand pulls out an item from the fridge. After that, she closes the fridge door with her right hand.\nSimultaneously, the man lifts his hands up slightly and then places them on his legs. He finally turns his head to the left to look at the woman. The woman turns to her left, holding the item in her left hand. With her right hand, she opens a white cabinet located higher up, while her left hand's two fingers press against the cabinet door. 
She retrieves a white bowl with her right hand and then closes the cabinet door with her left hand, which is still holding the item.\nThe woman turns towards the camera and walks forward, smiling at the man. The man turns his head to the right, directing his gaze towards the table. With her right hand, the woman places the white bowl on the table. In her left hand, she carries a bag half-filled with food. She opens the bag and prepares to pour its contents into the white bowl.", "events": [ { "event": "As the video begins, the camera focuses on a kitchen scene. Inside the kitchen, there are white cabinets, a stove, and a refrigerator. In the foreground, a middle-aged man dressed in a dark coat sits at a dark dining table, with his hands resting on a colorful checkered tablecloth laid on the table. A round dish piled with condiments and cups sits in the middle of the table. In the background, an elderly woman with curly hair, dressed in a light pink coat and gray pants, stands in front of the refrigerator. Her right hand grips the refrigerator handle, while her left hand pulls out an item from the fridge. After that, she closes the fridge door with her right hand.\nSimultaneously, the man lifts his hands up slightly and then places them on his legs. He finally turns his head to the left to look at the woman. The woman turns to her left, holding the item in her left hand. With her right hand, she opens a white cabinet located higher up, while her left hand's two fingers press against the cabinet door. She retrieves a white bowl with her right hand and then closes the cabinet door with her left hand, which is still holding the item.\nThe woman turns towards the camera and walks forward, smiling at the man. The man turns his head to the right, directing his gaze towards the table. With her right hand, the woman places the white bowl on the table. In her left hand, she carries a bag half-filled with food.
She opens the bag and prepares to pour its contents into the white bowl.", "visual_elements": [ { "content": "The camera focuses on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "In the kitchen, there are white cabinets, a stove, and a refrigerator.", "type": "scene", "weight": 2 }, { "content": "A middle-aged man wearing a dark coat sits at a dark dining table.", "type": "attribute", "weight": 3 }, { "content": "The man places his hands on a colorful checkered tablecloth laid on the table.", "type": "attribute", "weight": 3 }, { "content": "There are also some condiments and cups on the table.", "type": "scene", "weight": 2 }, { "content": "In front of the refrigerator stands an elderly woman with curly hair, wearing a light pink coat and gray pants.", "type": "attribute", "weight": 3 }, { "content": "The woman holds the refrigerator handle with her right hand.", "type": "attribute", "weight": 3 }, { "content": "With her left hand, the woman takes an item out of the refrigerator and then closes the door with her right hand.", "type": "action", "weight": 3 }, { "content": "The man lifts his hands up and then places them on his legs, finally turning his head to the left to look at the woman.", "type": "action", "weight": 3 }, { "content": "The woman turns to her left.", "type": "action", "weight": 3 }, { "content": "In her left hand, the woman holds an item.", "type": "attribute", "weight": 3 }, { "content": "With her right hand, the woman opens a white cabinet located at a higher position.", "type": "action", "weight": 3 }, { "content": "The two fingers of the woman's left hand press against the cabinet door.", "type": "attribute", "weight": 2 }, { "content": "With her right hand, the woman takes a white bowl out of the cabinet, and then closes the door with her left hand, which is still holding the item.", "type": "action", "weight": 3 }, { "content": "The woman turns towards the camera and walks forward.", "type": "action", "weight": 3 }, { "content": "The woman smiles at the man.", "type": "attribute", "weight": 3 }, { "content": "The man turns his head to the right, directing his gaze towards the table.", "type": "action", "weight": 2 }, { "content": "With her right hand, the woman places the white bowl on the table.", "type": "action", "weight": 3 }, { "content": "In her left hand, the woman carries a bag half-filled with food.", "type": "attribute", "weight": 3 }, { "content": "The woman opens the bag and prepares to pour its contents into the white bowl.", "type": "action", "weight": 3 } ] } ], "n_events": 1, "n_elements": 20 }, { "index": "TUNA_0164", "video_path": "VELOCITI/v_1e_9GirqmoI_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "As the video begins, the camera focuses on a man, filmed from his left front. The man rides a blue motorcycle along the street. He wears a dark green coat with a bowtie tied around his neck. A dark-colored bag is placed on the back seat of the motorcycle, and a pole with a red flag is inserted at the back of the bag. The background features a row of brick residential buildings, with green lawns and trees in front. The camera pans backward as the man shouts ahead and then turns his head to the left.\nThe camera cuts to a police car, filmed from its right front, and then the camera rotates to the right. The police car's lights flash as it moves forward.
A person dressed in black lies on the hood of the police car. The person's left hand is fisted, knocking on the police car's windshield. The background is a wide street, with a parking lot filled with cars on one side.\nThe camera cuts back to the man riding the motorcycle, films him from his front, then shakes and moves to the right. Behind him on the left, a police car speeds forward, leaving the motorcycle behind. Meanwhile, the motorcycle rider glances to his left, quickly turns his head and lowers it, and finally looks ahead to his left. Behind the man on the motorcycle, there is another police car. In the background, there are other vehicles traveling on the road.", "events": [ { "event": "As the video begins, the camera focuses on a man, filmed from his left front. The man rides a blue motorcycle along the street. He wears a dark green coat with a bowtie tied around his neck. A dark-colored bag is placed on the back seat of the motorcycle, and a pole with a red flag is inserted at the back of the bag. The background features a row of brick residential buildings, with green lawns and trees in front. The camera pans backward as the man shouts ahead and then turns his head to the left.", "visual_elements": [ { "content": "The camera focuses on a man, filming from the left front of him.", "type": "camera", "weight": 3 }, { "content": "The man rides a blue motorcycle along the street.", "type": "action", "weight": 3 }, { "content": "The man wears a dark green coat with a bow tie around his neck.", "type": "attribute", "weight": 3 }, { "content": "On the back seat of the motorcycle, there is a dark bag with a pole with a red flag inserted behind it.", "type": "attribute", "weight": 2 }, { "content": "The background features a row of brick residential buildings with green lawns and trees in front.", "type": "scene", "weight": 2 }, { "content": "The camera pans backward.", "type": "camera", "weight": 3 }, { "content": "The man shouts ahead and then turns his head to his left.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a police car, filmed from its right front, and then the camera rotates to the right. The police car's lights flash as it moves forward. A person dressed in black lies on the hood of the police car. The person's left hand is fisted, knocking on the police car's windshield. The background is a wide street, with a parking lot filled with cars on one side.", "visual_elements": [ { "content": "The camera cuts to a police car, films from the right front of it, then rotates to the right.", "type": "camera", "weight": 3 }, { "content": "The police car's lights flash as it moves forward.", "type": "action", "weight": 3 }, { "content": "A person dressed in black lies on the hood of the police car.", "type": "attribute", "weight": 3 }, { "content": "The person lying on the hood raises their left fist, knocking on the police car's windshield.", "type": "action", "weight": 3 }, { "content": "The background is a wide street.", "type": "scene", "weight": 2 }, { "content": "On one side of the street, there is a parking lot with many cars parked.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts back to the man riding the motorcycle, films him from his front, then shakes and moves to the right. Behind him on the left, a police car speeds forward, leaving the motorcycle behind. Meanwhile, the motorcycle rider glances to his left, quickly turns his head and lowers it, and finally looks ahead to his left.
Behind the man on the motorcycle, there is another police car. In the background, there are other vehicles traveling on the road.", "visual_elements": [ { "content": "The camera cuts back to the man on the motorcycle, filmed from his front, then shakes and moves to the right.", "type": "camera", "weight": 3 }, { "content": "Behind the left side of the man on the motorcycle, a police car speeds forward, leaving the motorcycle behind.", "type": "action", "weight": 3 }, { "content": "After glancing to his left, the man on the motorcycle quickly turns his head down and finally looks ahead to his left.", "type": "action", "weight": 3 }, { "content": "Behind the man on the motorcycle, there is another police car.", "type": "attribute", "weight": 2 }, { "content": "In the background, there are other vehicles traveling on the road.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0165", "video_path": "VELOCITI/v_1shru4620TE_seg_105_115.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "As the video begins, the camera focuses on a middle-aged woman with curly hair. She wears a blue and white checkered shirt and is obese. Standing in a room, she looks to her right.\nThe camera then cuts to a man in a dark shirt, holding handcuffs in his left hand and looking to his left front. In front of him is the curly-haired woman, who has turned her head to the left to look behind her, facing away from the camera.\nNext, the camera cuts to an outdoor scene, capturing the upper torsos of two adults. On the left is a woman in a black coat and sunglasses, facing the camera and talking while looking ahead. On the right is a man in a gray T-shirt and a necklace, also facing the camera and looking ahead before raising and then lowering an object with his right hand. The background features parked cars, buildings, and trees.\nThe camera cuts back to the curly-haired woman. In the background, there are bookshelves and some decorations. She grins and then bows her head to her right. The camera quickly pans up to the left, cutting to the man in the dark shirt. He looks surprised and then speaks while turning his head to look ahead.\nThe camera cuts to a doorway. The man in the dark shirt stands in the middle of the room behind the door, while the curly-haired woman runs towards the camera quickly.\nThe camera cuts back to the man in the dark shirt, who turns his head to the right.\nSubsequently, the camera cuts to a wider angle, farther from the man in the dark shirt. The curly-haired woman enters from the right side of the scene, looking towards the camera direction before turning her head to the right to look ahead. She holds an object and runs quickly to the left side of the scene. The curly-haired woman passes the man in the dark shirt, who stands in the center of the scene. Finally, the camera moves down and to the left, as the curly-haired woman bends over to put down an object. Meanwhile, the man in the dark shirt turns his head to the right, following the curly-haired woman with his gaze.", "events": [ { "event": "As the video begins, the camera focuses on a middle-aged woman with curly hair. She wears a blue and white checkered shirt and is obese. 
Standing in a room, she looks to her right.", "visual_elements": [ { "content": "The camera focuses on a middle-aged woman with curly hair.", "type": "camera", "weight": 3 }, { "content": "The curly-haired woman wears a blue and white checked shirt and is obese.", "type": "attribute", "weight": 2 }, { "content": "The curly-haired woman stands in a room, looking to her right.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera then cuts to a man in a dark shirt, holding handcuffs in his left hand and looking to his left front. In front of him is the curly-haired woman, who has turned her head to the left to look behind her, facing away from the camera.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A man in a dark shirt holds handcuffs in his left hand and looks to his left front.", "type": "attribute", "weight": 3 }, { "content": "In front of the man in the dark shirt is the curly-haired woman facing away from the camera.", "type": "attribute", "weight": 2 }, { "content": "The curly-haired woman turns her head to her left, looking behind her.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to an outdoor scene, capturing the upper torsos of two adults. On the left is a woman in a black coat and sunglasses, facing the camera and talking while looking ahead. On the right is a man in a gray T-shirt and a necklace, also facing the camera and looking ahead before raising and then lowering an object with his right hand. The background features parked cars, buildings, and trees.", "visual_elements": [ { "content": "The camera cuts outdoors, showing the upper torsos of two adults in a frontal view.", "type": "camera", "weight": 3 }, { "content": "The woman on the left side of the scene wears a black coat and sunglasses.", "type": "attribute", "weight": 3 }, { "content": "The woman on the left side of the scene speaks while looking forward in the direction of the camera.", "type": "action", "weight": 3 }, { "content": "The man on the right side of the scene wears a gray T-shirt and a necklace.", "type": "attribute", "weight": 3 }, { "content": "The man on the right side of the scene looks forward in the direction of the camera.", "type": "attribute", "weight": 3 }, { "content": "The man on the right side of the scene raises an object in his right hand and then puts it down.", "type": "action", "weight": 2 }, { "content": "Parked cars, buildings, and trees are visible in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts back to the curly-haired woman. In the background, there are bookshelves and some decorations. She grins and then bows her head to her right. The camera quickly pans up to the left, cutting to the man in the dark shirt.
He looks surprised and then speaks while turning his head to look ahead.", "visual_elements": [ { "content": "The camera cuts back to the curly-haired woman.", "type": "camera", "weight": 3 }, { "content": "There are bookshelves and some decorations in the background.", "type": "scene", "weight": 2 }, { "content": "The curly-haired woman grins.", "type": "attribute", "weight": 3 }, { "content": "The curly-haired woman bows her head to her right.", "type": "action", "weight": 3 }, { "content": "The camera quickly pans up to the left, cutting to the man in the dark shirt.", "type": "camera", "weight": 3 }, { "content": "The man in the dark shirt looks surprised.", "type": "attribute", "weight": 3 }, { "content": "The man in the dark shirt speaks while turning his head to look ahead.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a doorway. The man in the dark shirt stands in the middle of the room behind the door, while the curly-haired woman runs towards the camera quickly.", "visual_elements": [ { "content": "The camera cuts to a door.", "type": "camera", "weight": 3 }, { "content": "The man in the dark shirt stands in the center of the room behind the door.", "type": "attribute", "weight": 2 }, { "content": "The curly-haired woman runs quickly towards the camera.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the man in the dark shirt, who turns his head to the right.", "visual_elements": [ { "content": "The camera cuts to the man in the dark shirt.", "type": "camera", "weight": 3 }, { "content": "The man in the dark shirt turns his head to his right.", "type": "action", "weight": 3 } ] }, { "event": "Subsequently, the camera cuts to a wider angle, farther from the man in the dark shirt. The curly-haired woman enters from the right side of the scene, looking towards the camera direction before turning her head to the right to look ahead. She holds an object and runs quickly to the left side of the scene. The curly-haired woman passes the man in the dark shirt, who stands in the center of the scene. Finally, the camera moves down and to the left, as the curly-haired woman bends over to put down an object. 
Meanwhile, the man in the dark shirt turns his head to the right, following the curly-haired woman with his gaze.", "visual_elements": [ { "content": "The camera cuts to a wider angle, farther from the man in the dark shirt.", "type": "camera", "weight": 3 }, { "content": "The curly-haired woman enters from the right side of the scene, looks towards the camera, and then turns her head to her right to look ahead.", "type": "attribute", "weight": 3 }, { "content": "The curly-haired woman holds an object and runs quickly to the left side of the scene.", "type": "action", "weight": 2 }, { "content": "The curly-haired woman passes the man in the dark shirt, who stands in the center of the scene.", "type": "action", "weight": 3 }, { "content": "The camera pans down and to the left.", "type": "camera", "weight": 3 }, { "content": "The curly-haired woman bends down to put down an object.", "type": "action", "weight": 3 }, { "content": "The man in the dark shirt turns his head to the right, following the curly-haired woman with his gaze.", "type": "action", "weight": 2 } ] } ], "n_events": 7, "n_elements": 33 }, { "index": "TUNA_0166", "video_path": "VELOCITI/v_1shru4620TE_seg_45_55.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a shaky camera capturing a man in a dark coat from the front. The man lifts a square-shaped object, ready to throw it. To the left of the scene, a guitar stands vertically.\nThe camera cuts to a panoramic view of a room. The room is warmly decorated with curtains, chairs, and various ornaments. In the front left of the scene is an obese woman with curly blonde hair wearing a light blue shirt. Behind her on the left kneels a man, who throws the square object he's holding towards the woman. The woman runs towards the camera, getting hit on her left shoulder by the object, and lets out a scream.\nThe camera cuts to the woman's back as she falls to the ground face-down. To her left is a statue of a knight in armor and the guitar.\nThe camera cuts to the man, who shouts ahead and then stands up.\nThe camera cuts to the man's back as he runs forward quickly. The woman gets up, supporting herself on her hands and knees. As the man passes by her, he touches her back with his left hand.\nThe camera cuts to the woman's front. She speaks as she straightens her upper body.\nThen, the camera cuts to the woman's back. The woman kneels on one knee, supporting herself with her left hand on the wall and her right hand on her head. In front of her, the man stands sideways behind a door. He turns to face her, extending his left hand and taking two steps forward. The woman slowly stands up, leaning on the wall.\nThe camera cuts to a close-up of the woman's upper body, capturing her from the front.\nFinally, the camera cuts to behind the woman. She leans forward, rushing towards the man opposite her.", "events": [ { "event": "The video begins with a shaky camera capturing a man in a dark coat from the front. The man lifts a square-shaped object, ready to throw it. 
To the left of the scene, a guitar stands vertically.", "visual_elements": [ { "content": "The camera shakes, showing a man in a dark coat in a frontal view.", "type": "camera", "weight": 3 }, { "content": "The man raises a square object, ready to throw it.", "type": "action", "weight": 3 }, { "content": "A guitar stands vertically on the left side of the scene.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a panoramic view of a room. The room is warmly decorated with curtains, chairs, and various ornaments. In the front left of the scene is an obese woman with curly blonde hair wearing a light blue shirt. Behind her on the left kneels a man, who throws the square object he's holding towards the woman. The woman runs towards the camera, getting hit on her left shoulder by the object, and lets out a scream.", "visual_elements": [ { "content": "The camera cuts to a panoramic view of a room.", "type": "camera", "weight": 3 }, { "content": "The room is cozy, with curtains, chairs, and various decorations.", "type": "scene", "weight": 2 }, { "content": "In the left front of the scene is an obese woman in a light blue shirt with curly blonde hair.", "type": "attribute", "weight": 3 }, { "content": "Behind the woman, on her left, a man kneels.", "type": "attribute", "weight": 3 }, { "content": "The man throws the square object at the woman.", "type": "action", "weight": 2 }, { "content": "The woman runs towards the camera, hit on her left shoulder by the square object, and screams.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the woman's back as she falls to the ground face-down. To her left is a statue of a knight in armor and the guitar.", "visual_elements": [ { "content": "The camera cuts to behind the woman.", "type": "camera", "weight": 3 }, { "content": "The woman falls face-down onto the ground.", "type": "action", "weight": 3 }, { "content": "To the left of the woman is a statue of a knight in armor and a guitar.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to the man, who shouts ahead and then stands up.", "visual_elements": [ { "content": "The camera cuts to the man.", "type": "camera", "weight": 3 }, { "content": "The man shouts and stands up.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the man's back as he runs forward quickly. The woman gets up, supporting herself on her hands and knees. As the man passes by her, he touches her back with his left hand.", "visual_elements": [ { "content": "The camera cuts to behind the man.", "type": "camera", "weight": 3 }, { "content": "The man runs quickly forward.", "type": "action", "weight": 3 }, { "content": "The woman gets up, supporting herself on her hands and knees.", "type": "action", "weight": 2 }, { "content": "As the man passes the woman, his left hand touches her back.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the woman's front. She speaks as she straightens her upper body.", "visual_elements": [ { "content": "The camera cuts to a frontal view of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman speaks as she straightens her upper body.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera cuts to the woman's back. The woman kneels on one knee, supporting herself with her left hand on the wall and her right hand on her head. In front of her, the man stands sideways behind a door. He turns to face her, extending his left hand and taking two steps forward.
The woman slowly stands up, leaning on the wall.", "visual_elements": [ { "content": "The camera cuts to a back view of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman kneels on one knee, supporting herself with her left hand on the wall and her right hand on her head.", "type": "attribute", "weight": 3 }, { "content": "In front of the woman, the man stands sideways behind the door.", "type": "attribute", "weight": 3 }, { "content": "The man turns to face the woman, extends his left hand, and takes two steps forward.", "type": "action", "weight": 2 }, { "content": "The woman slowly stands up, supporting herself on the wall.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up of the woman's upper body, capturing her from the front.", "visual_elements": [ { "content": "The camera cuts to an upper torso view of the woman, showing her frontally.", "type": "camera", "weight": 3 } ] }, { "event": "Finally, the camera cuts to behind the woman. She leans forward, rushing towards the man opposite her.", "visual_elements": [ { "content": "Finally, the camera cuts to behind the woman.", "type": "camera", "weight": 3 }, { "content": "The woman leans forward and rushes towards the man opposite her.", "type": "action", "weight": 3 } ] } ], "n_events": 9, "n_elements": 28 }, { "index": "TUNA_0167", "video_path": "VELOCITI/v_2MxnokvI6c0_seg_155_165.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the scene focuses on a futuristic aircraft parked in the desert as the focal point. To the right of the aircraft's scene lie two corpses on the ground. Two men dressed in black combat suits jump down from the aircraft. The foreground is blurred, showing a hand holding a large gun pointed at the aircraft ahead.\nThe camera cuts, revealing the front of a blonde woman. She wears black attire, kneels on both knees, bows her head, and is operating a large gun with both hands.\nThe camera cuts to a closer angle, shooting the woman from her left front in an upward perspective.\nSubsequently, the camera cuts. A man dressed in a black combat suit advances slowly with a gun in his right hand. His body knocks over a wooden fence, causing the boards to fall. Others follow behind him.\nThe camera cuts back to the woman holding a gun.\nThe camera cuts to a wider view. Two men dressed in black combat suits, weapons in hand, quickly march towards the camera.\nThe camera cuts back to the woman, who utters a sentence towards the front and quickly ducks to the right side of the scene.\nThe camera cuts. Two men dressed in black combat suits, with ropes tied to their bodies, jump out of a window. Inside the window, flames shoot out, and smoke fills the air.\nNext, the camera cuts to the woman and then pans upwards. A man wearing a black hat and black uniform grabs the woman's right hand from behind. She elbows the man in the black hat with her left arm. Around them, many men in black uniforms with guns quickly appear, surrounding the woman.", "events": [ { "event": "At the beginning of the video, the scene focuses on a futuristic aircraft parked in the desert as the focal point. To the right of the aircraft's scene lie two corpses on the ground. Two men dressed in black combat suits jump down from the aircraft. 
The foreground is blurred, showing a hand holding a large gun pointed at the aircraft ahead.", "visual_elements": [ { "content": "The scene focuses on a futuristic aircraft parked in the desert.", "type": "camera", "weight": 3 }, { "content": "To the right of the aircraft's scene lie two corpses on the ground.", "type": "scene", "weight": 2 }, { "content": "Two men dressed in black combat suits jump down from the aircraft.", "type": "action", "weight": 3 }, { "content": "The foreground is blurred.", "type": "attribute", "weight": 3 }, { "content": "A hand holds a large firearm pointed at the aircraft in front.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts, revealing the front of a blonde woman. She wears black attire, kneels on both knees, bows her head, and is operating a large gun with both hands.", "visual_elements": [ { "content": "The camera cuts, revealing the front of a blonde woman.", "type": "camera", "weight": 3 }, { "content": "The woman wears black attire, kneels on both knees, bows her head.", "type": "attribute", "weight": 3 }, { "content": "The woman is operating a large gun with both hands.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a closer angle, shooting the woman from her left front in an upward perspective.", "visual_elements": [ { "content": "The camera cuts to a closer angle, shooting the woman from her left front in an upward perspective.", "type": "camera", "weight": 3 } ] }, { "event": "Subsequently, the camera cuts. A man dressed in a black combat suit advances slowly with a gun in his right hand. His body knocks over a wooden fence, causing the boards to fall. Others follow behind him.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A man dressed in a black combat suit advances slowly with a gun in his right hand.", "type": "action", "weight": 3 }, { "content": "The man in the black combat suit, holding the gun, knocks over a wooden fence with his body.", "type": "action", "weight": 2 }, { "content": "The wooden boards fall off.", "type": "scene", "weight": 2 }, { "content": "Others follow behind the man.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts back to the woman holding a gun.", "visual_elements": [ { "content": "The camera cuts back to the woman holding a gun.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to a wider view. Two men dressed in black combat suits, weapons in hand, quickly march towards the camera.", "visual_elements": [ { "content": "The camera cuts to the woman and subsequently to a wider view.", "type": "camera", "weight": 3 }, { "content": "Two men dressed in black combat suits, weapons in hand, quickly march towards the camera.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the woman, who utters a sentence towards the front and quickly ducks to the right side of the scene.", "visual_elements": [ { "content": "The camera cuts back to the woman.", "type": "camera", "weight": 3 }, { "content": "After uttering a sentence towards the front, the woman quickly ducks to the right side of the scene.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts. Two men dressed in black combat suits, with ropes tied to their bodies, jump out of a window. 
Inside the window, flames shoot out, and smoke fills the air.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "Two men dressed in black combat suits, with ropes tied to their bodies, jump out of a window.", "type": "action", "weight": 3 }, { "content": "Inside the window, flames shoot out, and smoke fills the air.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera cuts to the woman and then pans upwards. A man wearing a black hat and black uniform grabs the woman's right hand from behind. She elbows the man in the black hat with her left arm. Around them, many men in black uniforms with guns quickly appear, surrounding the woman.", "visual_elements": [ { "content": "The camera cuts to the woman and then pans upwards.", "type": "camera", "weight": 3 }, { "content": "A man wearing a black hat and black uniform grabs the woman's right hand from behind.", "type": "action", "weight": 3 }, { "content": "The woman elbows the man in the black hat with her left arm.", "type": "action", "weight": 3 }, { "content": "Around them, many men in black uniforms with guns quickly appear, surrounding the woman.", "type": "action", "weight": 3 } ] } ], "n_events": 9, "n_elements": 26 }, { "index": "TUNA_0168", "video_path": "VELOCITI/v_2MxnokvI6c0_seg_15_25.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera faces a male wearing a black hood directly. The male's eyes are surrounded with black paint. The background is completely dark.\nNext, the camera cuts to a female with blonde hair tied into double pigtails, wearing a black wide-brimmed hat and a white necklace around her neck. The female turns to her left, facing the camera. An item hangs from her right hand finger, which she then throws towards the right side of the screen.\nThe camera cuts again, providing an aerial view of the entire scene. The female sits on the sandy ground, supporting her head with her right hand and gazing at her left front. A lamp is placed on her left side. The background is a sandy area. The camera pans left, revealing a male in a black combat suit sliding down the sand behind the female's right side, holding a gun pointed at her. The female turns her head to the right to look at the male in the black combat suit.\nThe camera cuts to an overhead shot of the male in the black combat suit, with a red laser dot appearing at the tip of his weapon. In the foreground, the female can be seen lowering her right hand that was supporting her head.\nThe camera cuts to the female, who turns her head to look behind her right side. A laser dot appears on her neck and then moves away and disappears. Finally, the camera cuts up and to the left, revealing a distant, dilapidated house. A group of people dressed in black are running towards the house with their backs to the camera.", "events": [ { "event": "At the beginning of the video, the camera faces a male wearing a black hood directly. The male's eyes are surrounded with black paint. 
The background is completely dark.", "visual_elements": [ { "content": "The camera faces a male wearing a black hood directly.", "type": "camera", "weight": 3 }, { "content": "The male's eyes are surrounded with black paint.", "type": "attribute", "weight": 2 }, { "content": "The background is completely dark.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera cuts to a female with blonde hair tied into double pigtails, wearing a black wide-brimmed hat and a white necklace around her neck. The female turns to her left, facing the camera. An item hangs from her right hand finger, which she then throws towards the right side of the screen.", "visual_elements": [ { "content": "The camera cuts to a female.", "type": "camera", "weight": 3 }, { "content": "The blonde hair of this female is tied into double pigtails.", "type": "attribute", "weight": 3 }, { "content": "The female is wearing a black wide-brimmed hat and a white necklace around her neck.", "type": "attribute", "weight": 2 }, { "content": "The female turns to her left, facing the camera.", "type": "camera", "weight": 3 }, { "content": "An item hangs from the right hand finger of this woman.", "type": "attribute", "weight": 2 }, { "content": "The female then throws the item towards the right side of the screen.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again, providing an aerial view of the entire scene. The female sits on the sandy ground, supporting her head with her right hand and gazing at her left front. A lamp is placed on her left side. The background is a sandy area. The camera pans left, revealing a male in a black combat suit sliding down the sand behind the female's right side, holding a gun pointed at her. The female turns her head to the right to look at the male in the black combat suit.", "visual_elements": [ { "content": "The camera cuts again, providing an aerial view of the entire scene.", "type": "camera", "weight": 3 }, { "content": "The female sits on the sandy ground, supporting her head with her right hand and gazing at her left front.", "type": "action", "weight": 2 }, { "content": "A lamp is placed on the left side of the woman.", "type": "scene", "weight": 2 }, { "content": "The background is a sandy area.", "type": "scene", "weight": 2 }, { "content": "The camera pans left.", "type": "camera", "weight": 3 }, { "content": "Behind the female's right side, a male in a black combat suit slides down the sand.", "type": "action", "weight": 3 }, { "content": "The male holds a gun pointed at the female.", "type": "attribute", "weight": 2 }, { "content": "The female turns her head to the right to look at the male in the black combat suit.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to an overhead shot of the male in the black combat suit, with a red laser dot appearing at the tip of his weapon. In the foreground, the female can be seen lowering her right hand that was supporting her head.", "visual_elements": [ { "content": "The camera cuts to an overhead shot of the male in the black combat suit.", "type": "camera", "weight": 3 }, { "content": "A red laser dot appears at the tip of the weapon held by the male in the black combat suit.", "type": "attribute", "weight": 2 }, { "content": "In the foreground, the female can be seen lowering her right hand that was supporting her head.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts to the female, who turns her head to look behind her right side. 
A laser dot appears on her neck and then moves away and disappears. Finally, the camera cuts up and to the left, revealing a distant, dilapidated house. A group of people dressed in black are running towards the house with their backs to the camera.", "visual_elements": [ { "content": "The camera cuts to the female.", "type": "camera", "weight": 3 }, { "content": "The female turns her head to look behind her right side.", "type": "action", "weight": 3 }, { "content": "A laser dot appears on the female's neck.", "type": "attribute", "weight": 2 }, { "content": "The dot then moves away and disappears.", "type": "action", "weight": 2 }, { "content": "Finally, the camera cuts up and to the left.", "type": "camera", "weight": 3 }, { "content": "In the distance, there is a dilapidated house.", "type": "attribute", "weight": 3 }, { "content": "A group of people dressed in black are running towards the house with their backs to the camera.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 27 }, { "index": "TUNA_0169", "video_path": "VELOCITI/v_2MxnokvI6c0_seg_25_35.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an old, dilapidated house, with a dark sky behind it. A group of people dressed in black approach the house from different directions in front of it.\nNext, the camera cuts to the inside of the house, where a person wearing an orange-striped shirt and long curly hair walks towards the door with their back to the camera and opens it.\nThe camera cuts to a front view of the curly-haired person, who is a male. The male looks up ahead and then opens his mouth wide.\nThe camera cuts to a view from behind the male. He turns around to face the camera, holding a bottle of liquor in his right hand as he walks forward. Someone outside the door shoots the male in the back vigorously. The male spreads his arms wide, looking painful. The camera pans slightly downwards, and the bottle in the male's right hand falls, followed by the male collapsing forward to the ground. In the background, a black-clothed male puts down his gun at the doorway.\nThe camera cuts to behind a worn-out door and pans upwards. The door opens, and two males dressed in black uniforms enter, walking towards the camera before stopping and finally looking to their left front.", "events": [ { "event": "At the beginning of the video, the camera focuses on an old, dilapidated house, with a dark sky behind it. 
A group of people dressed in black approach the house from different directions in front of it.", "visual_elements": [ { "content": "The camera focuses on an old, dilapidated house.", "type": "camera", "weight": 3 }, { "content": "The sky behind the house is dark.", "type": "scene", "weight": 2 }, { "content": "A group of people dressed in black approach the house from different directions in front of it.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to the inside of the house, where a person wearing an orange-striped shirt and long curly hair walks towards the door with their back to the camera and opens it.", "visual_elements": [ { "content": "The camera cuts to the inside of the house.", "type": "camera", "weight": 3 }, { "content": "A person wearing an orange-striped shirt and long curly hair walks towards the door with their back to the camera and opens it.", "type": "action", "weight": 3 }, { "content": "The curly-haired person opens the door.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a front view of the curly-haired person, who is a male. The male looks up ahead and then opens his mouth wide.", "visual_elements": [ { "content": "The camera cuts to a front view of the curly-haired person.", "type": "camera", "weight": 3 }, { "content": "The curly-haired person is a male.", "type": "attribute", "weight": 2 }, { "content": "The male looks up ahead and then opens his mouth wide.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a view from behind the male. He turns around to face the camera, holding a bottle of liquor in his right hand as he walks forward. Someone outside the door shoots the male in the back vigorously. The male spreads his arms wide, looking painful. The camera pans slightly downwards, and the bottle in the male's right hand falls, followed by the male collapsing forward to the ground. In the background, a black-clothed male puts down his gun at the doorway.", "visual_elements": [ { "content": "The camera cuts to a view from behind the male.", "type": "camera", "weight": 3 }, { "content": "The male turns around to face the camera.", "type": "action", "weight": 2 }, { "content": "The male holds a bottle of liquor in his right hand as he walks forward.", "type": "action", "weight": 3 }, { "content": "Someone outside the door shoots the male in the back vigorously.", "type": "action", "weight": 3 }, { "content": "The male spreads his arms wide, looking painful.", "type": "action", "weight": 3 }, { "content": "The camera pans slightly downwards.", "type": "camera", "weight": 3 }, { "content": "The bottle in the male's right hand falls, followed by the male collapsing forward to the ground.", "type": "action", "weight": 3 }, { "content": "In the background, a black-clothed male puts down his gun at the doorway.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to behind a worn-out door and pans upwards. 
The door opens, and two males dressed in black uniforms enter, walking towards the camera before stopping and finally looking to their left front.", "visual_elements": [ { "content": "The camera cuts to behind a worn-out door and pans upwards.", "type": "camera", "weight": 3 }, { "content": "The door opens.", "type": "action", "weight": 3 }, { "content": "Two males dressed in black combat suits enter the door and walk towards the camera before stopping.", "type": "action", "weight": 3 }, { "content": "The two males look towards their left front.", "type": "action", "weight": 2 } ] } ], "n_events": 5, "n_elements": 21 }, { "index": "TUNA_0170", "video_path": "VELOCITI/v_2MxnokvI6c0_seg_35_45.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the scene focuses on two males dressed in black combat suits. Both males have black paint on their eyes. The male walking in front wears a metal device on his head. The male with the metal device on his head turns to his right and then runs forward. Subsequently, he turns left and continues running. The camera follows the male's movement, first panning downwards to the left and then continuing to move leftwards. The male with the metal device on his head stops, first turns his head to look back, and then crouches down. The camera follows the male downwards. The male kneels on one knee and flips the switch of an old-fashioned water pump in front of him. Water flows out of the pump's spout into a bucket placed in front of it.\nFinally, the camera cuts to an upward shot of a male wearing a black hood and dressed in a black combat suit. The hooded male holds a gun in his right hand. In the foreground, on the right side of the scene, there is a female wearing a black wide-brimmed hat. The male looks down at the female in front of him, pulls down the bottom half of his hood with his left hand to reveal his nose and mouth, and then places his left hand on the gun as well.", "events": [ { "event": "At the beginning of the video, the scene focuses on two males dressed in black combat suits. Both males have black paint on their eyes. The male walking in front wears a metal device on his head. The male with the metal device on his head turns to his right and then runs forward. Subsequently, he turns left and continues running. The camera follows the male's movement, first panning downwards to the left and then continuing to move leftwards. The male with the metal device on his head stops, first turns his head to look back, and then crouches down. The camera follows the male downwards. The male kneels on one knee and flips the switch of an old-fashioned water pump in front of him. 
Water flows out of the pump's spout into a bucket placed in front of it.", "visual_elements": [ { "content": "The scene focuses on two males dressed in black combat suits.", "type": "attribute", "weight": 3 }, { "content": "Both males have black paint on their eyes.", "type": "attribute", "weight": 2 }, { "content": "The male walking in front wears a metal device on his head.", "type": "attribute", "weight": 3 }, { "content": "The male with the metal device on his head turns to his right and runs forward.", "type": "action", "weight": 3 }, { "content": "Subsequently, the male with the metal device on his head turns left and continues running.", "type": "action", "weight": 3 }, { "content": "The camera follows the male's movement, first panning downwards to the left and then continuing to move leftwards.", "type": "camera", "weight": 3 }, { "content": "The male with the metal device on his head stops, first turns his head to look back, and then crouches down.", "type": "action", "weight": 3 }, { "content": "The camera follows the male downwards.", "type": "camera", "weight": 3 }, { "content": "The male kneels on one knee and flips the switch of an old-fashioned water pump in front of him.", "type": "action", "weight": 2 }, { "content": "Water flows out of the pump's spout into a bucket placed in front of it.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts to an upward shot of a male wearing a black hood and dressed in a black combat suit. The hooded male holds a gun in his right hand. In the foreground, on the right side of the scene, there is a female wearing a black wide-brimmed hat. The male looks down at the female in front of him, pulls down the bottom half of his hood with his left hand to reveal his nose and mouth, and then places his left hand on the gun as well.", "visual_elements": [ { "content": "The camera cuts to an upward shot of a male wearing a black hood and dressed in a black combat suit.", "type": "camera", "weight": 3 }, { "content": "The hooded male holds a gun in his right hand.", "type": "attribute", "weight": 2 }, { "content": "In the foreground, on the right side of the scene, there is a female wearing a black wide-brimmed hat.", "type": "attribute", "weight": 3 }, { "content": "The male looks down at the female in front of him.", "type": "attribute", "weight": 2 }, { "content": "The male pulls down the bottom half of his hood with his left hand to reveal his nose and mouth.", "type": "action", "weight": 3 }, { "content": "Then, the male places his left hand on the gun as well.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0171", "video_path": "VELOCITI/v_2mz3oytpugs_seg_125_135.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a duck-like character resembling a human and a young female. The duck character is positioned on the left side of the screen, wearing a gray coat. The duck character adjusts a spotted tie worn over its chest with both hands. The young female is on the right side of the screen, dressed in a red jacket with black and white striped sleeves. She sits on the ground, gazing at the duck character and holding a metal pole in her hand. The female throws the metal pole to the ground. The duck character turns its head to the left to look at the female. 
The female leans forward, looks at the duck character, and speaks to it.\nThe camera cuts to the young female. The female stands up, looking and speaking towards the front while retreating towards her left rear.\nThe camera cuts back to a front view of the duck character. The duck character has its hands placed on its chest, looking straight ahead.\nFinally, the camera cuts to the young female. The female picks up a black messenger bag containing a red umbrella and turns her head to the left while looking upwards. Then, she bends down, reaching her left hand downwards, and finally turns her head to the right to look in the direction of the camera.", "events": [ { "event": "At the beginning of the video, the camera focuses on a duck-like character resembling a human and a young female. The duck character is positioned on the left side of the screen, wearing a gray coat. The duck character adjusts a spotted tie worn over its chest with both hands. The young female is on the right side of the screen, dressed in a red jacket with black and white striped sleeves. She sits on the ground, gazing at the duck character and holding a metal pole in her hand. The female throws the metal pole to the ground. The duck character turns its head to the left to look at the female. The female leans forward, looks at the duck character, and speaks to it.", "visual_elements": [ { "content": "The camera focuses on a duck-like character resembling a human and a young female.", "type": "camera", "weight": 3 }, { "content": "The duck character is positioned on the left side of the screen, wearing a gray coat.", "type": "attribute", "weight": 3 }, { "content": "The duck character adjusts a spotted tie worn over its chest with both hands.", "type": "action", "weight": 3 }, { "content": "The young female is on the right side of the screen, dressed in a red jacket with black and white striped sleeves.", "type": "attribute", "weight": 3 }, { "content": "The female sits on the ground, gazing at the duck character and holding a metal pole in her hand.", "type": "attribute", "weight": 3 }, { "content": "The female throws the metal pole to the ground.", "type": "action", "weight": 3 }, { "content": "The duck character turns its head to the left to look at the female.", "type": "action", "weight": 3 }, { "content": "The female leans forward, looks at the duck character, and speaks to it.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the young female. The female stands up, looking and speaking towards the front while retreating towards her left rear.", "visual_elements": [ { "content": "The camera cuts to the young female.", "type": "camera", "weight": 3 }, { "content": "The female stands up, looking and speaking towards the front while retreating towards her left rear.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to a front view of the duck character. The duck character has its hands placed on its chest, looking straight ahead.", "visual_elements": [ { "content": "The camera cuts back to a front view of the duck character.", "type": "camera", "weight": 3 }, { "content": "The duck character has its hands placed on its chest, looking straight ahead.", "type": "attribute", "weight": 3 } ] }, { "event": "Finally, the camera cuts to the young female. The female picks up a black messenger bag containing a red umbrella and turns her head to the left while looking upwards. 
Then, she bends down, reaching her left hand downwards, and finally turns her head to the right to look in the direction of the camera.", "visual_elements": [ { "content": "The camera cuts to the young female.", "type": "camera", "weight": 3 }, { "content": "The female picks up a black messenger bag containing a red umbrella and turns her head to the left while looking upwards.", "type": "action", "weight": 3 }, { "content": "The female bends down, reaching her left hand downwards.", "type": "action", "weight": 3 }, { "content": "The female turns her head to the right to look in the direction of the camera.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 16 }, { "index": "TUNA_0172", "video_path": "VELOCITI/v_2mz3oytpugs_seg_80_90.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a dim outdoor setting. In the scene, there is a white duck character dressed in a gray suit, and in front of the duck character is a person lying on the ground wearing a dark coat and red underwear. The lying person rolls over to their left. The duck character jumps up, about to land on the lying person.\nThe camera cuts to a close-up scene of the duck character stepping on the lying person's abdomen.\nThe camera cuts to a young woman wearing a red jacket sitting sideways on the ground, looking towards her right front. Behind her is a man dressed in dark clothes. The man bends down to control the woman. The man lets go and stands up straight, looking ahead. The woman turns her head back, raises her left leg upwards, and trips the man, causing him to fall forward.\nThe camera cuts to behind the adult male. The male falls face-down onto the ground. In front of where the male fell, the duck character runs forward and then bends down to pick up something. In the background, there is a person lying on the ground moaning.\nThe scene changes to a male lying on the ground on his stomach, supporting himself with his hands and straightening his upper body to look ahead. In front of the male, the duck character raises a large round cover with both hands and hits the male on the head, causing him to fall face-down. The young woman on the left side of the scene first looks at the duck character and then at the hit male.\nThe camera cuts again. There are four characters in the scene. In the center of the scene, the young woman bends over and presses herself onto a male. On the left side of the scene, the duck character walks forward with its back to the camera. In front of the duck character, there is a male wearing red underwear.\nThe camera cuts to the duck character lowering its head and passing through the legs of a person wearing plaid pants.\nThe camera cuts to the back of the duck character. The person wearing plaid pants stands with their legs apart and grabs the back of the duck character's clothes, which has just passed through their legs.\nFinally, the camera cuts. The woman pressing on the male is overturned.", "events": [ { "event": "At the beginning of the video, the camera focuses on a dim outdoor setting. In the scene, there is a white duck character dressed in a gray suit, and in front of the duck character is a person lying on the ground wearing a dark coat and red underwear. The lying person rolls over to their left. 
The duck character jumps up, about to land on the lying person.", "visual_elements": [ { "content": "The camera focuses on a dim outdoor setting.", "type": "camera", "weight": 3 }, { "content": "In the scene, there is a white duck character dressed in a gray suit.", "type": "attribute", "weight": 3 }, { "content": "In front of the duck character, a person wearing a dark coat and red underwear lies on the ground.", "type": "attribute", "weight": 2 }, { "content": "The lying person rolls over to their left.", "type": "action", "weight": 2 }, { "content": "The duck character jumps up, about to land on the lying person.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up scene of the duck character stepping on the lying person's abdomen.", "visual_elements": [ { "content": "The camera cuts to a close-up scene of the duck character stepping on the lying person's abdomen.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to a young woman wearing a red jacket sitting sideways on the ground, looking towards her right front. Behind her is a man dressed in dark clothes. The man bends down to control the woman. The man lets go and stands up straight, looking ahead. The woman turns her head back, raises her left leg upwards, and trips the man, causing him to fall forward.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A young woman wearing a red jacket sits sideways on the ground, looking towards her right front.", "type": "attribute", "weight": 3 }, { "content": "Behind the woman, there is a man dressed in dark clothes.", "type": "attribute", "weight": 3 }, { "content": "The man bends over to control the woman.", "type": "attribute", "weight": 2 }, { "content": "The man lets go and stands up straight, looking ahead.", "type": "action", "weight": 2 }, { "content": "The woman turns her head back and raises her left leg upwards.", "type": "action", "weight": 3 }, { "content": "The man is tripped and falls forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to behind the adult male. The male falls face-down onto the ground. In front of where the male fell, the duck character runs forward and then bends down to pick up something. In the background, there is a person lying on the ground moaning.", "visual_elements": [ { "content": "The camera cuts to behind the adult male.", "type": "camera", "weight": 3 }, { "content": "The male falls face-down onto the ground.", "type": "action", "weight": 2 }, { "content": "In front of where the male fell, the duck character runs forward and then bends down to pick up something.", "type": "action", "weight": 3 }, { "content": "In the background, there is a person lying on the ground moaning.", "type": "scene", "weight": 2 } ] }, { "event": "The scene changes to a male lying on the ground on his stomach, supporting himself with his hands and straightening his upper body to look ahead. In front of the male, the duck character raises a large round cover with both hands and hits the male on the head, causing him to fall face-down. 
The young woman on the left side of the scene first looks at the duck character and then at the hit male.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A male lies on the ground on his stomach, supporting himself with his hands and straightening his upper body to look ahead.", "type": "attribute", "weight": 3 }, { "content": "In front of the male, the duck character raises a large round cover with both hands and hits the male's head, causing him to fall face-down.", "type": "action", "weight": 3 }, { "content": "The young woman on the left side of the scene first looks at the duck character and then at the hit male.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts again. There are four characters in the scene. In the center of the scene, the young woman bends over and presses herself onto a male. On the left side of the scene, the duck character walks forward with its back to the camera. In front of the duck character, there is a male wearing red underwear.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "There are four characters in the scene.", "type": "attribute", "weight": 2 }, { "content": "In the center of the scene, the young woman bends over and presses herself onto a male.", "type": "action", "weight": 3 }, { "content": "On the left side of the scene, the duck character walks forward with its back to the camera.", "type": "action", "weight": 3 }, { "content": "In front of the duck character, there is a male wearing a red undershirt.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to the duck character lowering its head and passing through the legs of a person wearing plaid pants.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The duck character lowers its head and passes through the legs of a person wearing checkered pants.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the back of the duck character. The person wearing plaid pants stands with their legs apart and grabs the back of the duck character's clothes, which has just passed through their legs.", "visual_elements": [ { "content": "The camera cuts to the back of the duck character.", "type": "camera", "weight": 3 }, { "content": "The person wearing plaid pants stands with their legs apart and grabs the back of the duck character's clothes, which has just passed through their legs.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts. The woman pressing on the male is overturned.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The woman pressing onto the male is overturned.", "type": "action", "weight": 3 } ] } ], "n_events": 9, "n_elements": 32 }, { "index": "TUNA_0173", "video_path": "VELOCITI/v_2oMW26rEZUk_seg_80_90.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the interior of a subway carriage. The lighting inside the carriage is dim, and there are visible bloodstains on the seats near the forefront of the scene. A blonde female wearing a patterned dress is lying on the ground with her back to the camera, fallen backwards.\nThe camera cuts to the female's right side. 
Her face and body are covered in blood, and she moans in pain. Subsequently, she rolls over towards the camera. The lighting in the background suddenly darkens for a moment and then brightens again.\nThen, the camera cuts. A male dressed in a dark suit is standing with his back to the camera. The male then turns to his left, facing the camera with his head bowed and looking ahead. The camera pans forward.\nFinally, the camera cuts to a frontal view of the female. The female crawls slowly forward in a pool of blood. Behind her, the male in the dark suit turns his body to face the camera directly, holding a large hammer in his right hand.", "events": [ { "event": "At the beginning of the video, the camera focuses on the interior of a subway carriage. The lighting inside the carriage is dim, and there are visible bloodstains on the seats near the forefront of the scene. A blonde female wearing a patterned dress is lying on the ground with her back to the camera, fallen backwards.", "visual_elements": [ { "content": "The camera focuses on the interior of a subway carriage.", "type": "camera", "weight": 3 }, { "content": "The lighting inside the carriage is dim.", "type": "scene", "weight": 2 }, { "content": "There are visible bloodstains on the seats at the forefront of the scene.", "type": "scene", "weight": 2 }, { "content": "A blonde female dressed in a floral dress has her back turned to the camera.", "type": "attribute", "weight": 3 }, { "content": "The female falls backwards onto the ground.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the female's right side. Her face and body are covered in blood, and she moans in pain. Subsequently, she rolls over towards the camera. The lighting in the background suddenly darkens for a moment and then brightens again.", "visual_elements": [ { "content": "The camera cuts to the female's right side.", "type": "camera", "weight": 3 }, { "content": "The female's face and body are covered in blood.", "type": "attribute", "weight": 3 }, { "content": "The female moans in pain.", "type": "attribute", "weight": 2 }, { "content": "The female rolls over towards the camera.", "type": "action", "weight": 3 }, { "content": "The lighting suddenly darkens and then brightens again.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera cuts. A male dressed in a dark suit is standing with his back to the camera. The male then turns to his left, facing the camera with his head bowed and looking ahead. The camera pans forward.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A male dressed in a dark suit stands with his back to the camera.", "type": "attribute", "weight": 3 }, { "content": "The male turns to his left, facing the camera.", "type": "action", "weight": 3 }, { "content": "The male lowers his head and looks ahead.", "type": "attribute", "weight": 3 }, { "content": "The camera pans forward.", "type": "camera", "weight": 3 } ] }, { "event": "Finally, the camera cuts to a frontal view of the female. The female crawls slowly forward in a pool of blood. 
Behind her, the male in the dark suit turns his body to face the camera directly, holding a large hammer in his right hand.", "visual_elements": [ { "content": "The camera cuts to a frontal view of the female.", "type": "camera", "weight": 3 }, { "content": "The female crawls slowly forward in a pool of blood.", "type": "action", "weight": 3 }, { "content": "Behind her, the male in the dark suit turns his body to face the camera directly.", "type": "action", "weight": 1 }, { "content": "The male holds a large hammer in his right hand.", "type": "attribute", "weight": 1 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0174", "video_path": "VELOCITI/v_2vV-8TyFBTI_seg_20_30.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the interior of a vehicle, with brown interior walls. A man dressed in a uniform is sitting facing the camera, looking ahead at a person lying down. The person lying down has a visible tremor in the right hand placed on the chest. On the left and right sides of the scene, paramedics are each attending to a person lying down.\nNext, the camera cuts to a view of an injured man lying down, shot from above. The man's left eye is red. He turns his head slightly to the left, looking upwards. The injured man's chest is exposed, with electrocardiogram electrodes attached to it. His right hand, wearing a glove, is placed on his right chest. A hand places a red-wired stethoscope on the right chest area of the injured man.\nThe camera cuts back to the uniformed man sitting down, shot from his left side. He lowers his head, gazing downwards while speaking. The vehicle body shakes.\nFinally, the camera cuts back to the injured man lying down. A hand places the red stethoscope on the man's left chest area.", "events": [ { "event": "At the beginning of the video, the camera focuses on the interior of a vehicle, with brown interior walls. A man dressed in a uniform is sitting facing the camera, looking ahead at a person lying down. The person lying down has a visible tremor in the right hand placed on the chest. On the left and right sides of the scene, paramedics are each attending to a person lying down.", "visual_elements": [ { "content": "The camera focuses on the interior of a vehicle.", "type": "camera", "weight": 3 }, { "content": "The interior walls of the carriage are brown.", "type": "attribute", "weight": 2 }, { "content": "A man dressed in a uniform sits facing the camera.", "type": "attribute", "weight": 3 }, { "content": "The man looks ahead at a person lying down.", "type": "attribute", "weight": 3 }, { "content": "The person lying down has a visible tremor in the right hand placed on the chest.", "type": "action", "weight": 2 }, { "content": "On the left and right sides of the scene, paramedics are each attending to a person lying down.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera cuts to a view of an injured man lying down, shot from above. The man's left eye is red. He turns his head slightly to the left, looking upwards. The injured man's chest is exposed, with electrocardiogram electrodes attached to it. His right hand, wearing a glove, is placed on his right chest. 
A hand places a red-wired stethoscope on the right chest area of the injured man.", "visual_elements": [ { "content": "The camera cuts to a view of an injured man lying down, shot from above.", "type": "camera", "weight": 3 }, { "content": "The man's left eye is red.", "type": "attribute", "weight": 2 }, { "content": "The man turns his head slightly to the left, looking upwards.", "type": "attribute", "weight": 3 }, { "content": "The injured man's chest is exposed, with electrocardiogram electrodes attached to it.", "type": "attribute", "weight": 3 }, { "content": "The injured man's right hand, wearing a glove, is placed on his right chest.", "type": "attribute", "weight": 3 }, { "content": "A hand places a red-wired stethoscope on the right chest area of the injured man.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts back to the uniformed man sitting down, shot from his left side. He lowers his head, gazing downwards while speaking. The vehicle body shakes.", "visual_elements": [ { "content": "The camera cuts back to the seated uniformed man, shot from his left side.", "type": "camera", "weight": 3 }, { "content": "The uniformed man lowers his head, gazing downwards while speaking.", "type": "action", "weight": 3 }, { "content": "The vehicle body shakes.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera cuts back to the injured man lying down. A hand places the red stethoscope on the man's left chest area.", "visual_elements": [ { "content": "Finally, the camera cuts back to the injured man lying down.", "type": "camera", "weight": 3 }, { "content": "A hand places the red stethoscope on the man's left chest area.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 17 }, { "index": "TUNA_0175", "video_path": "VELOCITI/v_3FgbU0ZZzQY_seg_115_125.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a male and a blonde female. The male is on the leftmost side of the screen, dressed in a blue coat and a green undershirt. The blonde female is in the middle of the screen, dressed in a brown coat and a black top. The camera pans right and then shakes downward. The male pushes the female towards the open car door on the right side of the screen. The background is blurred.\nThe scene cuts to a close-up of a curly-haired male's face. There is a blinding light to his right rear. He initially looks straight ahead, then turns his gaze to the left side of the screen.\nThe camera cuts. A male with long hair sits in the passenger seat of a silver car. He closes the door on his left side and keeps his gaze directed ahead.\nThe camera cuts to an exterior view of the car. A male dressed in a dark coat stands in front of the vehicle. In the background, there are two road lamps emitting bright white light. The right rear door of the car is open, and someone is preparing to get in.\nThe camera cuts back to the close-up of the curly-haired male.\nThe camera cuts to the front of the car. The male in the blue coat sits in the driver's seat, twisting his head to look behind while maneuvering the steering wheel to reverse.", "events": [ { "event": "At the beginning of the video, the camera focuses on a male and a blonde female. The male is on the leftmost side of the screen, dressed in a blue coat and a green undershirt. 
The blonde female is in the middle of the screen, dressed in a brown coat and a black top. The camera pans right and then shakes downward. The male pushes the female towards the open car door on the right side of the screen. The background is blurred.", "visual_elements": [ { "content": "The camera focuses on a male and a blonde female.", "type": "camera", "weight": 3 }, { "content": "The male is on the leftmost side of the screen, dressed in a blue coat and a green undershirt.", "type": "attribute", "weight": 3 }, { "content": "The blonde female is in the middle of the screen, wearing a brown coat and a black top.", "type": "attribute", "weight": 3 }, { "content": "The camera pans right and then shakes downward.", "type": "camera", "weight": 3 }, { "content": "The male pushes the female towards the open car door on the right of the screen.", "type": "action", "weight": 3 }, { "content": "The background is blurred.", "type": "attribute", "weight": 2 } ] }, { "event": "The scene cuts to a close-up of a curly-haired male's face. There is a blinding light to his right rear. He initially looks straight ahead, then turns his gaze to the left side of the screen.", "visual_elements": [ { "content": "The scene cuts to a close-up of a curly-haired male's face.", "type": "camera", "weight": 3 }, { "content": "There is a blinding light to the right rear of the curly-haired male.", "type": "scene", "weight": 2 }, { "content": "The curly-haired male initially looks straight ahead, then turns his gaze to the left side of the screen.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts. A male with long hair sits in the passenger seat of a silver car. He closes the door on his left side and keeps his gaze directed ahead.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A male with long hair sits in the passenger seat of a silver car.", "type": "attribute", "weight": 3 }, { "content": "The long-haired male closes the door on his left side.", "type": "action", "weight": 3 }, { "content": "The long-haired male keeps his gaze directed ahead.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to an exterior view of the car. A male dressed in a dark coat stands in front of the vehicle. In the background, there are two road lamps emitting bright white light. The right rear door of the car is open, and someone is preparing to get in.", "visual_elements": [ { "content": "The camera cuts to an exterior view of the car.", "type": "camera", "weight": 3 }, { "content": "A male dressed in a dark coat stands in front of the vehicle.", "type": "attribute", "weight": 3 }, { "content": "In the background, there are two road lamps emitting bright white light.", "type": "scene", "weight": 2 }, { "content": "The right rear door of the car is open.", "type": "attribute", "weight": 2 }, { "content": "There is a person outside the car door preparing to get in.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the close-up of the curly-haired male.", "visual_elements": [ { "content": "The camera cuts back to the close-up of the curly-haired male.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to the front of the car. 
The male in the blue coat sits in the driver's seat, twisting his head to look behind while maneuvering the steering wheel to reverse.", "visual_elements": [ { "content": "The camera cuts to the front of the car.", "type": "attribute", "weight": 3 }, { "content": "The male in the blue coat sits in the driver's seat, twisting his head to look behind while maneuvering the steering wheel to reverse.", "type": "action", "weight": 3 } ] } ], "n_events": 6, "n_elements": 21 }, { "index": "TUNA_0176", "video_path": "VELOCITI/v_3FgbU0ZZzQY_seg_40_50.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a blonde female, wearing a brown coat and a black top. She first looks at the person opposite her, then turns her head to look down to her left, and finally looks back at the person opposite. In the foreground, there is a person's back on the left side of the screen.\nThe camera cuts to the front view of the person opposite the blonde female, revealing a middle-aged male wearing a light-colored undershirt and a dark coat. The middle-aged male frowns as he looks at the blonde female.\nThe camera cuts back to the blonde female, who extends her left arm in front of her body and looks ahead. A person stands behind and to the right of the blonde female. She then lowers her left arm and bends down to look below. The male behind and to the right of the blonde female wears a blue coat and a green undershirt. He takes something out of his pocket and operates it while looking down.\nThe camera cuts once again to the middle-aged male, who looks to his right front and speaks.\nNext, the camera cuts to a brightly lit scene, where a male with long brown hair stands on the left side of the screen, gazing towards the right. In the background, light shines from the center of the screen towards the camera, creating a noticeable halo in the top left corner. The long-haired male speaks as he retreats and exits from the left side of the screen.\nFinally, the camera cuts back to the blonde female and shakes downward to the left. She looks down and sees a person lying on the ground. To her right front, the male in the blue coat also gazes at the person lying on the ground.", "events": [ { "event": "At the beginning of the video, the camera focuses on a blonde female, wearing a brown coat and a black top. She first looks at the person opposite her, then turns her head to look down to her left, and finally looks back at the person opposite. In the foreground, there is a person's back on the left side of the screen.", "visual_elements": [ { "content": "The camera focuses on a blonde female.", "type": "camera", "weight": 3 }, { "content": "The female wears a brown coat and a black top.", "type": "attribute", "weight": 3 }, { "content": "The female first looks at the person opposite her, then turns her head to look down to her left, and finally looks back at the person opposite.", "type": "action", "weight": 3 }, { "content": "In the foreground, there is a person's back on the left side of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to the front view of the person opposite the blonde female, revealing a middle-aged male wearing a light-colored undershirt and a dark coat. 
The middle-aged male frowns as he looks at the blonde female.", "visual_elements": [ { "content": "The camera cuts to the front view of the person opposite the blonde female.", "type": "camera", "weight": 3 }, { "content": "A middle-aged male wearing a light-colored undershirt and a dark coat appears in the scene.", "type": "attribute", "weight": 3 }, { "content": "The middle-aged male frowns as he looks at the blonde female.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts back to the blonde female, who extends her left arm in front of her body and looks ahead. A person stands behind and to the right of the blonde female. She then lowers her left arm and bends down to look below. The male behind and to the right of the blonde female wears a blue coat and a green undershirt. He takes something out of his pocket and operates it while looking down.", "visual_elements": [ { "content": "The camera cuts back to the blonde female.", "type": "camera", "weight": 3 }, { "content": "The blonde female extends her left arm in front of her body and looks ahead.", "type": "attribute", "weight": 3 }, { "content": "A person stands behind and to the right of the blonde female.", "type": "attribute", "weight": 2 }, { "content": "The blonde female then lowers her left arm and bends down to look below.", "type": "action", "weight": 3 }, { "content": "The male behind and to the right of the blonde female wears a blue coat and a green undershirt.", "type": "attribute", "weight": 2 }, { "content": "The male takes something out of his pocket and operates it while looking down.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts once again to the middle-aged male, who looks to his right front and speaks.", "visual_elements": [ { "content": "The camera cuts once again to the middle-aged male.", "type": "camera", "weight": 3 }, { "content": "The middle-aged male looks to his right front and speaks.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to a brightly lit scene, where a male with long brown hair stands on the left side of the screen, gazing towards the right. In the background, light shines from the center of the screen towards the camera, creating a noticeable halo in the top left corner. The long-haired male speaks as he retreats and exits from the left side of the screen.", "visual_elements": [ { "content": "The camera cuts to a brightly lit scene.", "type": "camera", "weight": 3 }, { "content": "A male with long brown hair stands on the left side of the screen, gazing towards the right.", "type": "attribute", "weight": 3 }, { "content": "In the background, light shines from the center of the screen towards the camera.", "type": "scene", "weight": 2 }, { "content": "A noticeable halo appears in the top left corner.", "type": "scene", "weight": 2 }, { "content": "The long-haired male speaks as he retreats and exits from the left side of the screen.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts back to the blonde female and shakes downward to the left. She looks down and sees a person lying on the ground. 
To her right front, the male in the blue coat also gazes at the person lying on the ground.", "visual_elements": [ { "content": "The camera cuts back to the blonde female and shakes downward to the left.", "type": "camera", "weight": 3 }, { "content": "The female looks down and sees a person lying on the ground.", "type": "attribute", "weight": 3 }, { "content": "To the right front of the female, the male in the blue coat also gazes at the person lying on the ground.", "type": "attribute", "weight": 2 } ] } ], "n_events": 6, "n_elements": 23 }, { "index": "TUNA_0177", "video_path": "VELOCITI/v_3IZVz7ukKyU_seg_105_115.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the front windshield of a cockpit. The pilot is a male wearing a hat, headphones, and sunglasses. The cockpit shakes violently, and there are numerous scratches on the front windshield.\nThe scene cuts to the backs of two individuals carrying large backpacks. Both are dressed in heavy winter clothing. Their arms are stretched up, gripping onto something. Their bodies sway back and forth.\nThe camera cuts to a wider angle. On the right side of the scene, a helicopter hovers at the edge of a steep snowy cliff. Two people hang in the air, holding onto the cabin door with both hands. A person wearing a red hat stands at the edge of the cliff, holding a climbing rope in his right hand. The helicopter rotates to the right side of the screen, moving closer to the cliff.\nThe scene changes, capturing the cliff scene from inside the helicopter. The camera shakes up and down. In the foreground, a climber dressed in dark blue clothing and wearing a red hat holds a rope in his right hand. Behind him are three other climbers, with the two on the sides kneeling on one knee and the one in the middle sitting on the ground. The wind and snow are very heavy, and the weather conditions are harsh.\nFinally, the camera cuts again, showing the helicopter's nose turning towards the cliff. The climber wearing the red hat bends down, stretching out his left hand, attempting to reach the helicopter's nose.", "events": [ { "event": "At the beginning of the video, the camera focuses on the front windshield of a cockpit. The pilot is a male wearing a hat, headphones, and sunglasses. The cockpit shakes violently, and there are numerous scratches on the front windshield.", "visual_elements": [ { "content": "The camera focuses on the front windshield of a cockpit.", "type": "camera", "weight": 3 }, { "content": "The pilot is a male wearing a hat, headphones, and sunglasses.", "type": "attribute", "weight": 3 }, { "content": "The cockpit shakes violently.", "type": "action", "weight": 3 }, { "content": "There are numerous scratches on the front windshield.", "type": "attribute", "weight": 2 } ] }, { "event": "The scene cuts to the backs of two individuals carrying large backpacks. Both are dressed in heavy winter clothing. Their arms are stretched up, gripping onto something. 
Their bodies sway back and forth.", "visual_elements": [ { "content": "The camera cuts to the backs of two individuals carrying large backpacks.", "type": "camera", "weight": 3 }, { "content": "Both are dressed in heavy winter gear.", "type": "attribute", "weight": 2 }, { "content": "Both of their arms are stretched up, gripping something.", "type": "attribute", "weight": 3 }, { "content": "Both of them sway their bodies left and right.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a wider angle. On the right side of the scene, a helicopter hovers at the edge of a steep snowy cliff. Two people hang in the air, holding onto the cabin door with both hands. A person wearing a red hat stands at the edge of the cliff, holding a climbing rope in his right hand. The helicopter rotates to the right side of the screen, moving closer to the cliff.", "visual_elements": [ { "content": "The camera cuts to a wider angle.", "type": "camera", "weight": 3 }, { "content": "On the right side of the scene, a helicopter hovers over the edge of a steep snowy cliff.", "type": "attribute", "weight": 2 }, { "content": "Two people hang in the air, gripping the cabin door with both hands.", "type": "attribute", "weight": 3 }, { "content": "A person wearing a red hat stands at the cliff's edge, holding a climbing rope in his right hand.", "type": "attribute", "weight": 3 }, { "content": "The helicopter rotates to the right of the scene, moving closer to the cliff.", "type": "action", "weight": 3 } ] }, { "event": "The scene changes, capturing the cliff scene from inside the helicopter. The camera shakes up and down. In the foreground, a climber dressed in dark blue clothing and wearing a red hat holds a rope in his right hand. Behind him are three other climbers, with the two on the sides kneeling on one knee and the one in the middle sitting on the ground. The wind and snow are very heavy, and the weather conditions are harsh.", "visual_elements": [ { "content": "The camera cuts, capturing the cliff scene from inside the helicopter.", "type": "camera", "weight": 3 }, { "content": "The camera shakes up and down.", "type": "camera", "weight": 3 }, { "content": "In the foreground, a climber dressed in dark blue clothing and wearing a red hat holds a rope in his right hand.", "type": "attribute", "weight": 3 }, { "content": "Behind him are three other climbers, with the two on the sides kneeling on one knee and the one in the middle sitting on the ground.", "type": "scene", "weight": 2 }, { "content": "The wind and snow are very heavy, and the weather conditions are harsh", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera cuts again, showing the helicopter's nose turning towards the cliff. 
The climber wearing the red hat bends down, stretching out his left hand, attempting to reach the helicopter's nose.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "The helicopter rotates so that its nose is close to the cliff.", "type": "action", "weight": 3 }, { "content": "The climber wearing the red hat bends forward, stretching out his left hand, attempting to reach the helicopter's nose.", "type": "attribute", "weight": 3 } ] } ], "n_events": 5, "n_elements": 21 }, { "index": "TUNA_0178", "video_path": "VELOCITI/v_3WCcFVnEKh0_seg_60_70.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a dim indoor scene in slow motion. A person holding a gun falls backward towards the right front of the screen, facing away from the camera. Some white paper-like objects fall through the air.\nThe camera cuts to a frontal view of a young female dressed in dark clothing and wearing goggles. She holds a gun in both hands, with the muzzle pointing towards the camera. The female steps backward while firing two shots straight ahead.\nThe camera cuts to a frontal view of a woman dressed in a white shirt and a gray blazer, leaning against a stone wall with guns in both hands. She brings her hands together, causing the two guns to collide.\nThe camera cuts to an upper-body view of two females. On the right side of the screen, a black female loads a bullet into her gun. On the left side, a long-haired female looks down at her hands.\nThe camera cuts, capturing an outdoor scene through a fence. A black car moves slowly from left to right across the scene. In the background, a large tree with a canopy of green leaves stands tall, surrounded by several parked cars.\nThe scene transitions and pans to the left. A black man dressed in a black suit exits the driver's side of the black car and closes the door behind him, his gaze fixed straight ahead. A man wearing sunglasses and dressed in a black suit exits the passenger side, shutting the door after him.", "events": [ { "event": "At the beginning of the video, the camera focuses on a dim indoor scene in slow motion. A person holding a gun falls backward towards the right front of the screen, facing away from the camera. Some white paper-like objects fall through the air.", "visual_elements": [ { "content": "The camera focuses on a dim indoor scene in slow motion.", "type": "camera", "weight": 3 }, { "content": "A person holding a gun falls backwards towards the right front of the scene, facing away from the camera.", "type": "action", "weight": 3 }, { "content": "Some white paper-like objects fall through the air.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a frontal view of a young female dressed in dark clothing and wearing goggles. She holds a gun in both hands, with the muzzle pointing towards the camera. 
The female steps backward while firing two shots straight ahead.", "visual_elements": [ { "content": "The camera cuts to a frontal view of a young female dressed in dark clothing and wearing goggles.", "type": "camera", "weight": 3 }, { "content": "The female holds a gun in both hands, with the muzzle pointing towards the camera.", "type": "attribute", "weight": 3 }, { "content": "The female steps backwards and fires two shots straight ahead.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a frontal view of a woman dressed in a white shirt and a gray blazer, leaning against a stone wall with guns in both hands. She brings her hands together, causing the two guns to collide.", "visual_elements": [ { "content": "The camera cuts to a frontal view of a woman dressed in a white shirt and a gray blazer.", "type": "camera", "weight": 3 }, { "content": "The female leans against a stone wall, holding a gun in each hand.", "type": "attribute", "weight": 3 }, { "content": "The female brings her hands together, causing the two guns to collide.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to an upper-body view of two females. On the right side of the screen, a black female loads a bullet into her gun. On the left side, a long-haired female looks down at her hands.", "visual_elements": [ { "content": "The camera cuts to an upper-body view of two females.", "type": "camera", "weight": 3 }, { "content": "On the right side of the scene, the black female loads a bullet into her gun.", "type": "action", "weight": 3 }, { "content": "On the left side, the long-haired female looks down at her hands.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts, capturing an outdoor scene through a fence. A black car moves slowly from left to right across the scene. In the background, a large tree with a canopy of green leaves stands tall, surrounded by several parked cars.", "visual_elements": [ { "content": "The camera cuts, capturing an outdoor scene through a fence.", "type": "camera", "weight": 3 }, { "content": "A black car moves slowly from left to right across the scene.", "type": "action", "weight": 3 }, { "content": "In the background, there is a large tree with a canopy of green leaves.", "type": "scene", "weight": 2 }, { "content": "Several cars are parked around the tree.", "type": "scene", "weight": 2 } ] }, { "event": "The scene transitions and pans to the left. A black man dressed in a black suit exits the driver's side of the black car and closes the door behind him, his gaze fixed straight ahead. 
A man wearing sunglasses and dressed in a black suit exits the passenger side, shutting the door after him.", "visual_elements": [ { "content": "The camera cuts and pans to the left.", "type": "camera", "weight": 3 }, { "content": "A black man dressed in a black suit exits the driver's side of the black car and closes the door behind him.", "type": "action", "weight": 3 }, { "content": "The black man's gaze is fixed straight ahead.", "type": "attribute", "weight": 3 }, { "content": "A man wearing sunglasses and dressed in a black suit exits the passenger side, shutting the door after him.", "type": "action", "weight": 3 } ] } ], "n_events": 6, "n_elements": 20 }, { "index": "TUNA_0179", "video_path": "VELOCITI/v_3WCcFVnEKh0_seg_95_105.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a black brick wall, with some peeling traces visible on its surface. The background is blurred, and the environment is rather dim.\nThe scene then cuts to an upward shot from the right and below of two men. The two males stand side by side under a wooden ceiling. The man closer to the camera wears a camouflage vest and ripped jeans. The man farther from the camera has long hair and wears a dark gray short-sleeved shirt. The man closer to the camera holds a gun in both hands, while the long-haired man holds a gun in his right hand. Both men gaze ahead and fire their guns at something in front of them.\nThe camera cuts to the back of the two men. In front of them, a woman, holding a gun in both hands, fires vigorously while advancing forward. The two men fall backward after being hit.\nNext, the camera cuts to a side view of the two men in slow motion. The long-haired man falls straight backward, while the man closer to the camera falls backward with his arms spread wide, facing the camera.\nThe camera cuts to the back of the two men. They exit the scene from the left and right sides respectively. The woman holding the gun with both hands faces the camera and continues to move closer, continuously firing her gun.\nNext, the camera cuts in slow motion. A fat man wearing a black short-sleeved shirt and a blue jacket holds a gun in his right hand, standing in front of a wooden wall. He fires his gun ahead while walking to the right side of the scene. A person wearing a black coat enters the scene from the right side, facing away from the camera. The person in the black coat holds a gun in both hands and fires towards the front.\nThe camera cuts to show a woman dressed in black lying on the ground, her right arm stretched out, firing two shots forward with her gun.\nThe camera cuts, shooting from a lower angle, showing a person wearing torn jeans standing up.\nThe camera cuts in slow motion. A woman wearing a black leather jacket and blue jeans flies in the air. She holds a white textile in her left hand and fires her gun ahead with her right hand.\nThe camera finally cuts to an empty indoor room with a blurred view.", "events": [ { "event": "At the beginning of the video, the camera focuses on a black brick wall, with some peeling traces visible on its surface. 
The background is blurred, and the environment is rather dim.", "visual_elements": [ { "content": "The camera focuses on a black brick wall.", "type": "camera", "weight": 3 }, { "content": "The black brick wall has some peeling traces on its surface.", "type": "attribute", "weight": 2 }, { "content": "The background is blurred, and the environment is rather dim.", "type": "scene", "weight": 2 } ] }, { "event": "The scene then cuts to an upward shot from the right and below of two men. The two males stand side by side under a wooden ceiling. The man closer to the camera wears a camouflage vest and ripped jeans. The man farther from the camera has long hair and wears a dark gray short-sleeved shirt. The man closer to the camera holds a gun in both hands, while the long-haired man holds a gun in his right hand. Both men gaze ahead and fire their guns at something in front of them.", "visual_elements": [ { "content": "The camera cuts to an upward shot from the right and below of two men.", "type": "camera", "weight": 3 }, { "content": "The two men stand side by side under a wooden ceiling.", "type": "attribute", "weight": 2 }, { "content": "The man closer to the camera wears a camouflage vest and ripped jeans.", "type": "attribute", "weight": 3 }, { "content": "The man farther from the camera has long hair and wears a dark gray short-sleeved shirt.", "type": "attribute", "weight": 3 }, { "content": "The man closer to the camera holds a gun in both hands, while the long-haired man holds a gun in his right hand.", "type": "attribute", "weight": 3 }, { "content": "Both men gaze ahead and fire their guns at something in front of them.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the back of the two men. In front of them, a woman, holding a gun in both hands, fires vigorously while advancing forward. The two men fall backward after being hit.", "visual_elements": [ { "content": "The camera cuts to the back of the two men in slow motion.", "type": "camera", "weight": 3 }, { "content": "A woman, holding a gun in both hands, fires vigorously while advancing forward.", "type": "action", "weight": 2 }, { "content": "The two men fall backward after being hit.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera cuts to a side view of the two men in slow motion. The long-haired man falls straight backward, while the man closer to the camera falls backward with his arms spread wide, facing the camera.", "visual_elements": [ { "content": "The camera cuts to a side view of the two men in slow motion.", "type": "camera", "weight": 2 }, { "content": "The long-haired man falls straight backward.", "type": "action", "weight": 3 }, { "content": "The man closer to the camera falls backward with his arms spread wide, facing the camera.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the back of the two men. They exit the scene from the left and right sides respectively. The woman holding the gun with both hands faces the camera and continues to move closer, continuously firing her gun.", "visual_elements": [ { "content": "The camera cuts to the back of the two men.", "type": "camera", "weight": 3 }, { "content": "The two men exit the scene from the left and right sides respectively.", "type": "attribute", "weight": 2 }, { "content": "The woman holding the gun with both hands faces the camera and continues to move closer, continuously firing her gun.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts in slow motion. 
A fat man wearing a black short-sleeved shirt and a blue jacket holds a gun in his right hand, standing in front of a wooden wall. He fires his gun ahead while walking to the right side of the scene. A person wearing a black coat enters the scene from the right side, facing away from the camera. The person in the black coat holds a gun in both hands and fires towards the front.", "visual_elements": [ { "content": "The camera cuts in slow motion.", "type": "camera", "weight": 3 }, { "content": "A fat man wearing  a black short-sleeved shirt and a blue jacket holds a gun in his right hand, standing in front of a wooden wall.", "type": "attribute", "weight": 3 }, { "content": "The man fires his gun towards the front while walking to the right side of the screen.", "type": "action", "weight": 3 }, { "content": "A person wearing a black coat enters the scene from the right side, facing away from the camera.", "type": "attribute", "weight": 3 }, { "content": "The person in the black coat holds a gun in both hands and fires towards the front.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to show a woman dressed in black lying on the ground, her right arm stretched out, firing two shots forward with her gun.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A woman dressed in black lies on the ground, her right arm extended, firing two shots forward with her gun.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts, shooting from a lower angle, showing a person wearing torn jeans standing up.", "visual_elements": [ { "content": "The camera cuts, shooting from a lower angle.", "type": "camera", "weight": 3 }, { "content": "A person wearing torn jeans stands up.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts in slow motion. A woman wearing a black leather jacket and blue jeans flies in the air. She holds a white textile in her left hand and fires her gun ahead with her right hand.", "visual_elements": [ { "content": "The camera cuts in slow motion.", "type": "camera", "weight": 3 }, { "content": "A woman dressed in a black leather jacket and blue jeans flies in the air.", "type": "attribute", "weight": 3 }, { "content": "The woman holds a white textile in her left hand and fires her gun ahead with her right hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera finally cuts to an empty indoor room with a blurred view.", "visual_elements": [ { "content": "The camera finally cuts to an empty indoor room.", "type": "camera", "weight": 3 }, { "content": "The scene is blurred.", "type": "scene", "weight": 3 } ] } ], "n_events": 10, "n_elements": 32 }, { "index": "TUNA_0180", "video_path": "VELOCITI/v_3_dGBLwXBIE_seg_30_40.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the shoulders and above of a man wearing a dark coat from the front. The man first looks upwards and then downwards. The background is blurred, and the lighting is dim.\nNext, the camera cuts to show a middle-aged woman in a white top on the left side of the scene. Above the middle-aged woman, a man's head can be seen sticking out. On the right side of the scene, there is a long-haired woman wearing glasses. To the right of the long-haired woman, there stands a young man who laughs. 
The middle-aged woman turns her head to the left to look at the laughing man.\nThe camera cuts back to the man in the dark coat, whose expression is terrified. The man in the dark coat raises his body upwards.\nThe camera cuts to behind the man in the dark coat, shooting the entire scene from a downward angle. In the scene, there is a wall adorned with intricate patterns. The man stands facing away from the camera, positioned outside a window on the wall. He grabs the top of the window with both hands and, using his legs for leverage, jumps into the window. At the same time, many slender metal cylinders fly towards the wall and insert into it.\nNext, the camera transitions to a new scene, panning to the left and down. The lighting in the room is orange. A person jumps into the room from the window on the top right of the scene. Next to the window, a man grabs the wall with both hands, his feet off the ground. The jumping man grabs the back of the shirt of the man next to the window and pulls him downwards. Finally, the camera shakes as it pans forward. The jumping man pushes the other man against the wall ahead.", "events": [ { "event": "At the beginning of the video, the camera focuses on the shoulders and above of a man wearing a dark coat from the front. The man first looks upwards and then downwards. The background is blurred, and the lighting is dim.", "visual_elements": [ { "content": "The camera focuses on the shoulders and above of a man wearing a dark coat from the front.", "type": "camera", "weight": 3 }, { "content": "The man first looks upwards and then downwards.", "type": "action", "weight": 3 }, { "content": "The background is blurred, and the lighting is dim.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera cuts to show a middle-aged woman in a white top on the left side of the scene. Above the middle-aged woman, a man's head can be seen sticking out. On the right side of the scene, there is a long-haired woman wearing glasses. To the right of the long-haired woman, there stands a young man who laughs. The middle-aged woman turns her head to the left to look at the laughing man.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "On the left side of the screen, there is a middle-aged woman wearing a white top.", "type": "attribute", "weight": 3 }, { "content": "Above the middle-aged woman, a man's head can be seen sticking out.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the screen, there is a long-haired woman wearing glasses.", "type": "attribute", "weight": 3 }, { "content": "To the right of the long-haired woman, there is a young man.", "type": "attribute", "weight": 3 }, { "content": "The young man laughs.", "type": "action", "weight": 3 }, { "content": "The middle-aged woman turns her head to the left and looks at the laughing man.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the man in the dark coat, whose expression is terrified. The man in the dark coat raises his body upwards.", "visual_elements": [ { "content": "The camera cuts back to the man in the dark coat.", "type": "camera", "weight": 3 }, { "content": "The man in the dark coat has a terrified expression.", "type": "attribute", "weight": 3 }, { "content": "The man in the dark coat raises his body upwards.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to behind the man in the dark coat, shooting the entire scene from a downward angle. 
In the scene, there is a wall adorned with intricate patterns. The man stands facing away from the camera, positioned outside a window on the wall. He grabs the top of the window with both hands and, using his legs for leverage, jumps into the window. At the same time, many slender metal cylinders fly towards the wall and insert into it.", "visual_elements": [ { "content": "The camera cuts to behind the man in the dark coat, shooting the entire scene from below.", "type": "camera", "weight": 3 }, { "content": "In the scene, there is a wall with many intricate patterns.", "type": "attribute", "weight": 2 }, { "content": "The man stands outside a window on the wall, facing away from the camera.", "type": "attribute", "weight": 3 }, { "content": "The man grabs the top of the window with both hands and jumps into the window with a push from his legs.", "type": "action", "weight": 3 }, { "content": "At the same time, many slender metal cylinders fly towards the wall and insert into it.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera transitions to a new scene, panning to the left and down. The lighting in the room is orange. A person jumps into the room from the window on the top right of the scene. Next to the window, a man grabs the wall with both hands, his feet off the ground. The jumping man grabs the back of the shirt of the man next to the window and pulls him downwards. Finally, the camera shakes as it pans forward. The jumping man pushes the other man against the wall ahead.", "visual_elements": [ { "content": "The camera cuts to a new scene and pans down to the left.", "type": "camera", "weight": 3 }, { "content": "The lighting in the room is orange.", "type": "scene", "weight": 2 }, { "content": "A person jumps into the room from the window on the top right side of the screen.", "type": "action", "weight": 3 }, { "content": "Next to the window, a man grabs the wall with both hands, his feet off the ground.", "type": "attribute", "weight": 2 }, { "content": "The jumping man grabs the back of the shirt of the man next to the window and pulls him downwards.", "type": "action", "weight": 3 }, { "content": "The camera shakes as it pans forward.", "type": "camera", "weight": 3 }, { "content": "The jumping man pushes the other man against the wall ahead.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 25 }, { "index": "TUNA_0181", "video_path": "VELOCITI/v_3bo6h-7ryfE_seg_50_60.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a forest, then pans to the left. In front of the scene are some bare trees. Behind the trees, a soldier wearing a white hat and another soldier holding a gun can be seen. On the left side of the scene, two soldiers march forward with their backs to the camera. The air is filled with smoke.\nThe scene cuts, then pans upwards and to the left. There are four soldiers in green military uniforms and helmets in the scene. The four soldiers run towards the camera with their guns. One soldier on the left side of the scene falls backwards after being hit by a bullet.\nThe camera cuts again and begins to shake. A soldier wearing a white hat is on the left side of the scene, facing right towards the camera. Initially, the soldier with the white hat looks forward. Then he turns his head to the right, facing the camera. 
Finally, he looks forward again and waves his left hand. Behind him, some soldiers wearing red berets stand with their guns raised behind a row of large weapons.\nThe camera cuts to two soldiers wearing red berets who have their backs to the camera. The soldier farther from the camera falls backwards after being hit from behind. The other soldier quickly turns around and aims his gun forward.\nThe camera cuts to a shot of some soldiers wearing helmets from the front. They are marching towards the camera. The soldier at the very front fires a shot forward.\nThe camera cuts to some soldiers wearing red berets, filmed from the right side of the soldiers. One soldier throws a grenade forward.\nThe camera cuts back to a shot of a group of soldiers wearing helmets marching towards the camera from the front. A shell explodes in front of the soldiers, blowing up the ground. The soldier at the very front is blown down. A soldier wearing a white hat enters from the bottom left corner of the scene and runs to the right side. The soldier with the white hat faces left towards the camera and shouts towards the front.", "events": [ { "event": "At the beginning of the video, the camera focuses on a forest, then pans to the left. In front of the scene are some bare trees. Behind the trees, a soldier wearing a white hat and another soldier holding a gun can be seen. On the left side of the scene, two soldiers march forward with their backs to the camera. The air is filled with smoke.", "visual_elements": [ { "content": "The camera focuses on a forest.", "type": "camera", "weight": 3 }, { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "In front of the scene are some bare trees.", "type": "scene", "weight": 2 }, { "content": "Behind the trees, a soldier wearing a white hat and another soldier holding a gun can be seen.", "type": "attribute", "weight": 3 }, { "content": "On the left side of the scene, two soldiers march forward with their backs to the camera.", "type": "action", "weight": 2 }, { "content": "The air is filled with smoke.", "type": "scene", "weight": 2 } ] }, { "event": "The scene cuts, then pans upwards and to the left. There are four soldiers in green military uniforms and helmets in the scene. The four soldiers run towards the camera with their guns. One soldier on the left side of the scene falls backwards after being hit by a bullet.", "visual_elements": [ { "content": "The scene cuts, then pans upwards and to the left.", "type": "camera", "weight": 3 }, { "content": "There are four soldiers in light-colored military uniforms and helmets in the scene.", "type": "attribute", "weight": 3 }, { "content": "The four soldiers run towards the camera with their guns.", "type": "action", "weight": 3 }, { "content": "One soldier on the left side of the scene falls backwards after being hit by a bullet.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts again and begins to shake. A soldier wearing a white hat is on the left side of the scene, facing right towards the camera. Initially, the soldier with the white hat looks forward. Then he turns his head to the right, facing the camera. Finally, he looks forward again and waves his left hand. 
Behind him, some soldiers wearing red berets stand with their guns raised behind a row of large weapons.", "visual_elements": [ { "content": "The camera cuts again and begins to shake.", "type": "camera", "weight": 3 }, { "content": "A soldier wearing a white hat is on the left side of the scene, facing right towards the camera.", "type": "attribute", "weight": 3 }, { "content": "Initially, the soldier with the white hat looks forward. Then he turns his head to the right, facing the camera.", "type": "action", "weight": 2 }, { "content": "Finally, the soldier with the white hat looks forward again and waves his left hand.", "type": "action", "weight": 3 }, { "content": "Behind him, some soldiers wearing red berets stand with their guns raised behind a row of large weapons.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to two soldiers wearing red berets who have their backs to the camera. The soldier farther from the camera falls backwards after being hit from behind. The other soldier quickly turns around and aims his gun forward.", "visual_elements": [ { "content": "The camera cuts to two soldiers wearing red berets who have their backs to the camera.", "type": "camera", "weight": 3 }, { "content": "The soldier farther from the camera falls backwards after being hit from behind.", "type": "action", "weight": 2 }, { "content": "The other soldier quickly turns around and aims his gun forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a shot of some soldiers wearing helmets from the front. They are marching towards the camera. The soldier at the very front fires a shot forward.", "visual_elements": [ { "content": "The camera cuts to a shot of some soldiers wearing helmets from the front.", "type": "camera", "weight": 3 }, { "content": "A group of soldiers wearing helmets are marching towards the camera.", "type": "action", "weight": 3 }, { "content": "The soldier at the very front fires a shot forward.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to some soldiers wearing red berets, filmed from the right side of the soldiers. One soldier throws a grenade forward.", "visual_elements": [ { "content": "The camera cuts to some soldiers wearing red berets, filmed from the right side of the soldiers.", "type": "camera", "weight": 3 }, { "content": "One soldier throws a grenade forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to a shot of a group of soldiers wearing helmets marching towards the camera from the front. A shell explodes in front of the soldiers, blowing up the ground. The soldier at the very front is blown down. A soldier wearing a white hat enters from the bottom left corner of the scene and runs to the right side. 
The soldier with the white hat faces left towards the camera and shouts towards the front.", "visual_elements": [ { "content": "The camera cuts back to a shot of a group of soldiers wearing helmets marching towards the camera from the front.", "type": "camera", "weight": 3 }, { "content": "A shell explodes in front of the soldiers, blowing up the ground.", "type": "action", "weight": 3 }, { "content": "The soldier at the very front is blown down.", "type": "action", "weight": 3 }, { "content": "A soldier wearing a white hat enters from the bottom left corner of the scene and runs to the right side.", "type": "action", "weight": 3 }, { "content": "The soldier with the white hat faces left towards the camera and shouts towards the front.", "type": "action", "weight": 2 } ] } ], "n_events": 7, "n_elements": 28 }, { "index": "TUNA_0182", "video_path": "VELOCITI/v_3cTyY36ENxY_seg_30_40.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a blonde male boxer. The camera zooms in. The blonde boxer is bare-chested, with well-developed muscles. Red bruises surround his eyes. The background is blurred, and the spectators in the stands are cheering.\nNext, the camera cuts and moves upwards. A black-haired male boxer sits in a corner of the boxing ring. The black-haired boxer wears red boxing gloves and red-and-white striped sports shorts. The black-haired boxer slowly stands up. Ahead of him, there is a male referee, who is relatively fat, wearing a white shirt with a black bowtie on his chest. He approaches the black-haired boxer and holds his wrists with both hands. In the background, there are two photographers capturing the scene in the boxing ring.\nThen, the camera cuts back to the blonde man, shooting upwards from a lower angle. He wears red gloves, and there are many bright lights in the background.\nThe camera cuts back to the black-haired boxer, shooting from his left side. The camera cuts to the left. The black-haired boxer gazes ahead, walking towards the left side of the screen. The referee faces the camera, stepping backwards. On the left side of the screen, the blonde man quickly punches with his right hand, hitting the black-haired boxer's head. The black-haired boxer's head turns to the right and he falls down.\nThe camera quickly moves upwards to the right and cuts scenes. It focuses on a female spectator wearing a white coat and a blue lining. The woman closes her eyes. Behind the woman in the white coat, another female spectator watches ahead, looking delighted. There are more spectators in the background.", "events": [ { "event": "At the beginning of the video, the camera focuses on a blonde male boxer. The camera zooms in. The blonde boxer is bare-chested, with well-developed muscles. Red bruises surround his eyes. 
The background is blurred, and the spectators in the stands are cheering.", "visual_elements": [ { "content": "The camera focuses on a blonde male boxer.", "type": "camera", "weight": 3 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The blonde man is bare-chested, with well-developed muscles.", "type": "attribute", "weight": 3 }, { "content": "Red bruises surround the blonde boxer's eyes.", "type": "attribute", "weight": 2 }, { "content": "The background is blurred, and the spectators in the stands are cheering.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera cuts and moves upwards. A black-haired male boxer sits in a corner of the boxing ring. The black-haired boxer wears red boxing gloves and red-and-white striped sports shorts. The black-haired boxer slowly stands up. Ahead of him, there is a male referee, who is relatively fat, wearing a white shirt with a black bowtie on his chest. He approaches the black-haired boxer and holds his wrists with both hands. In the background, there are two photographers capturing the scene in the boxing ring.", "visual_elements": [ { "content": "The camera cuts and moves upwards.", "type": "camera", "weight": 3 }, { "content": "A black-haired male boxer sits in a corner of the boxing ring.", "type": "attribute", "weight": 3 }, { "content": "The black-haired boxer wears red boxing gloves and red-and-white striped sports shorts.", "type": "attribute", "weight": 2 }, { "content": "The black-haired boxer slowly stands up.", "type": "action", "weight": 3 }, { "content": "Ahead of the black-haired boxer, there is a male referee, who is relatively fat, wearing a white shirt with a black bowtie on his chest.", "type": "attribute", "weight": 3 }, { "content": "The referee approaches the black-haired boxer and holds his wrists with both hands.", "type": "action", "weight": 3 }, { "content": "In the background, there are two photographers capturing the scene in the boxing ring.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera cuts back to the blonde man, shooting upwards from a lower angle. He wears red gloves, and there are many bright lights in the background.", "visual_elements": [ { "content": "The camera cuts back to the blonde man, shooting upwards from a lower angle.", "type": "camera", "weight": 3 }, { "content": "The blonde man wears red gloves.", "type": "attribute", "weight": 3 }, { "content": "There are many bright lights in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts back to the black-haired boxer, shooting from his left side. The camera cuts to the left. The black-haired boxer gazes ahead, walking towards the left side of the screen. The referee faces the camera, stepping backwards. On the left side of the screen, the blonde man quickly punches with his right hand, hitting the black-haired boxer's head. 
The black-haired boxer's head turns to the right and he falls down.", "visual_elements": [ { "content": "The camera cuts back to the black-haired boxer, shooting from his left side.", "type": "camera", "weight": 3 }, { "content": "The camera cuts to the left.", "type": "camera", "weight": 3 }, { "content": "The black-haired boxer gazes ahead, walking towards the left side of the screen.", "type": "action", "weight": 3 }, { "content": "The referee faces the camera, stepping backwards.", "type": "action", "weight": 2 }, { "content": "On the left side of the screen, the blonde man quickly punches with his right hand, hitting the black-haired boxer's head.", "type": "action", "weight": 3 }, { "content": "The black-haired boxer's head turns to the right and he falls down.", "type": "action", "weight": 3 } ] }, { "event": "The camera quickly moves upwards to the right and cuts scenes. It focuses on a female spectator wearing a white coat and a blue lining. The woman closes her eyes. Behind the woman in the white coat, another female spectator watches ahead, looking delighted. There are more spectators in the background.", "visual_elements": [ { "content": "The camera quickly moves upwards to the right and cuts scenes.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on a female spectator wearing a white coat and a blue lining.", "type": "camera", "weight": 3 }, { "content": "The woman closes her eyes.", "type": "action", "weight": 3 }, { "content": "Behind the woman in the white coat, another female spectator watches ahead, looking delighted.", "type": "attribute", "weight": 2 }, { "content": "There are more spectators in the background.", "type": "scene", "weight": 2 } ] } ], "n_events": 5, "n_elements": 26 }, { "index": "TUNA_0183", "video_path": "VELOCITI/v_3i76M1f3nB4_seg_75_85.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera shows two male characters. The man on the left side of the screen is facing right, wearing a light blue shirt and kneeling on both knees. The man in the center of the screen is facing away from the camera, wearing a light-colored T-shirt, and looking down at a person lying on the ground in front of him. The background is a lush green jungle with bright sunshine. On the ground to the right of the man in the light blue shirt, there is a bottle filled with brown liquid, and to his left, there is a wooden rack tied with ropes. The man in a light blue shirt holds something in both hands as he moves.\nThe camera cuts to a close-up of the lying man's face. He rests on a backpack, with obvious blood on his forehead. Tears are in his eyes, and he bites a leather strap.\nThe camera cuts again. The man in the light blue shirt raises a white cloth-wrapped object above his head backwards. The man in the light-colored T-shirt on the left side of the screen is facing right, and he has curly hair.\nNext, the camera cuts back to a close-up of the lying man. He has a painful expression and is breathing rapidly.\nThe camera cuts to the man in the light blue shirt, who hits the object in his hand hard downwards.\nThe camera cuts, capturing the entire body of the lying man from his left side. The man in the light blue shirt, who is on the right side of the lying man, strikes the object in his hand violently onto the lying man's bloodied right leg. The lying man's left leg is also bloodied. 
A pair of hands on the left side of the screen is lifting the lying man's right foot.\nThe camera cuts to the lying man, who bites the strap and screams, his head thrown back and trembling continuously.\nFinally, the camera cuts to the man in the light-colored T-shirt. The background is a blurred green jungle.", "events": [ { "event": "At the beginning of the video, the camera shows two male characters. The man on the left side of the screen is facing right, wearing a light blue shirt and kneeling on both knees. The man in the center of the screen is facing away from the camera, wearing a light-colored T-shirt, and looking down at a person lying on the ground in front of him. The background is a lush green jungle with bright sunshine. On the ground to the right of the man in the light blue shirt, there is a bottle filled with brown liquid, and to his left, there is a wooden rack tied with ropes. The man in a light blue shirt holds something in both hands as he moves.", "visual_elements": [ { "content": "The camera focuses on the two male characters.", "type": "camera", "weight": 3 }, { "content": "The man on the left side of the screen is facing right, wearing a light blue shirt and kneeling on both knees.", "type": "attribute", "weight": 3 }, { "content": "The man in the center of the screen is facing away from the camera, wearing a light-colored T-shirt.", "type": "attribute", "weight": 3 }, { "content": "The man in the light-colored T-shirt is looking down at a person lying on the ground in front of him.", "type": "attribute", "weight": 2 }, { "content": "The background is a lush green jungle with bright sunshine.", "type": "scene", "weight": 2 }, { "content": "On the ground to the right of the man in the light blue shirt, there is a bottle filled with brown liquid.", "type": "scene", "weight": 1 }, { "content": "To the left of the man in the light blue shirt, there is a wooden rack tied with ropes.", "type": "scene", "weight": 2 }, { "content": "The man in a light blue shirt holds something in both hands as he moves.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to a close-up of the lying man's face. He rests on a backpack, with obvious blood on his forehead. Tears are in his eyes, and he bites a leather strap.", "visual_elements": [ { "content": "The camera cuts to a close-up of the lying man's face.", "type": "camera", "weight": 3 }, { "content": "The lying man rests on a backpack.", "type": "attribute", "weight": 2 }, { "content": "There is obvious blood on the lying man's forehead.", "type": "attribute", "weight": 2 }, { "content": "Tears are in the lying man's eyes, and he bites a leather strap.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts again. The man in the light blue shirt raises a white cloth-wrapped object above his head backwards. The man in the light-colored T-shirt on the left side of the screen is facing right, and he has curly hair.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The man in the light blue shirt raises a white cloth-wrapped object above his head backwards.", "type": "action", "weight": 3 }, { "content": "The man in the light-colored T-shirt on the left side of the screen is facing right.", "type": "attribute", "weight": 3 }, { "content": "The man in the light-colored T-shirt has curly hair.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera cuts back to a close-up of the lying man. 
He has a painful expression and is breathing rapidly.", "visual_elements": [ { "content": "The camera cuts back to a close-up of the lying man.", "type": "camera", "weight": 3 }, { "content": "The lying man has a painful expression and is breathing rapidly.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to the man in the light blue shirt, who hits the object in his hand hard downwards.", "visual_elements": [ { "content": "The camera cuts to the man in the light blue shirt.", "type": "camera", "weight": 3 }, { "content": "The man in the light blue shirt hits the object in his hand hard downwards.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts, capturing the entire body of the lying man from his left side. The man in the light blue shirt, who is on the right side of the lying man, strikes the object in his hand violently onto the lying man's bloodied right leg. The lying man's left leg is also bloodied. A pair of hands on the left side of the screen is lifting the lying man's right foot.", "visual_elements": [ { "content": "The camera cuts, capturing the entire body of the lying man from his left side.", "type": "camera", "weight": 3 }, { "content": "The man in the light blue shirt, who is on the right side of the lying man, strikes the object in his hand violently onto the lying man's bloodied right leg.", "type": "action", "weight": 3 }, { "content": "The lying man's left leg is also bloodied.", "type": "attribute", "weight": 2 }, { "content": "A pair of hands on the left side of the screen is lifting the lying man's right foot.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to the lying man, who bites the strap and screams, his head thrown back and trembling continuously.", "visual_elements": [ { "content": "The camera cuts back to the lying man.", "type": "camera", "weight": 3 }, { "content": "The lying man bites the strap and screams, his head thrown back and trembling continuously.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts to the man in the light-colored T-shirt. The background is a blurred green jungle.", "visual_elements": [ { "content": "Finally, the camera cuts to the man in the light-colored T-shirt.", "type": "camera", "weight": 3 }, { "content": "The background is a blurred green jungle.", "type": "scene", "weight": 2 } ] } ], "n_events": 8, "n_elements": 28 }, { "index": "TUNA_0184", "video_path": "VELOCITI/v_3nRy9hw7tr0_seg_5_15.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a disheveled young man. The man wears a dark green zip-up jacket. He speaks to someone across from him. Suddenly, he stops talking and looks to his right front.\nThe camera then cuts to a young woman with long hair. She wears a gray sweater and a green undershirt. She sits at a table, looking down and flipping through a book.\nThe camera cuts back to the man in the dark green jacket.\nThe camera cuts to a man wearing a brown hoodie and a white undershirt, who speaks to someone across from him. Behind the man in the brown hoodie, the young woman sitting at the table reading can be seen.\nNext, the camera returns to the man in the dark green jacket. 
He props himself up with both hands and leans forward while talking.\nThe camera cuts back to the man in the brown hoodie.\nFinally, the camera cuts to the left rear of the hoodie-wearing man. The man in the dark green jacket and the man in the brown hoodie are seated face-to-face on opposite sides of a dining table, with condiments placed on the table. The man in the dark green jacket has his right hand resting on the table, and his left index finger points at the man in the brown hoodie.", "events": [ { "event": "At the beginning of the video, the camera focuses on a disheveled young man. The man wears a dark green zip-up jacket. He speaks to someone across from him. Suddenly, he stops talking and looks to his right front.", "visual_elements": [ { "content": "The camera focuses on a disheveled young man.", "type": "camera", "weight": 3 }, { "content": "The man wears a dark green zip-up jacket.", "type": "attribute", "weight": 3 }, { "content": "The man speaks to someone across from him.", "type": "action", "weight": 3 }, { "content": "Suddenly, he stops talking and looks to his right front.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to a young woman with long hair. She wears a gray sweater and a green undershirt. She sits at a table, looking down and flipping through a book.", "visual_elements": [ { "content": "The camera cuts to a young woman with long hair.", "type": "camera", "weight": 3 }, { "content": "The woman wears a gray sweater and a green undershirt.", "type": "attribute", "weight": 3 }, { "content": "The woman sits at a table, looking down and flipping through a book.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts back to the man in the dark green jacket.", "visual_elements": [ { "content": "The camera cuts back to the man in the dark green jacket.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to a man wearing a brown hoodie and a white undershirt, who speaks to someone across from him. Behind the man in the brown hoodie, the young woman sitting at the table reading can be seen.", "visual_elements": [ { "content": "The camera cuts to a man wearing a brown hoodie and a white undershirt.", "type": "camera", "weight": 3 }, { "content": "The man speaks to someone across from him.", "type": "action", "weight": 3 }, { "content": "Behind the man in the brown hoodie, the young woman sitting at the table reading can be seen.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera returns to the man in the dark green jacket. He props himself up with both hands and leans forward while talking.", "visual_elements": [ { "content": "The camera returns to the man in the dark green jacket.", "type": "camera", "weight": 3 }, { "content": "The man in the dark green jacket props himself up with both hands and leans forward while talking.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the man in the brown hoodie.", "visual_elements": [ { "content": "The camera cuts back to the man in the brown hoodie.", "type": "camera", "weight": 3 } ] }, { "event": "Finally, the camera cuts to the left rear of the hoodie-wearing man. The man in the dark green jacket and the man in the brown hoodie are seated face-to-face on opposite sides of a dining table, with condiments placed on the table. 
The man in the dark green jacket has his right hand resting on the table, and his left index finger points at the man in the brown hoodie.", "visual_elements": [ { "content": "The camera cuts to the left rear of the hoodie-wearing man.", "type": "camera", "weight": 3 }, { "content": "The man in the dark green jacket and the man in the brown hoodie are seated face-to-face on opposite sides of a dining table.", "type": "attribute", "weight": 3 }, { "content": "Condiments are placed on the table.", "type": "attribute", "weight": 2 }, { "content": "The man in the dark green jacket has his right hand resting on the table, and his left index finger points at the man in the brown hoodie.", "type": "attribute", "weight": 3 } ] } ], "n_events": 7, "n_elements": 18 }, { "index": "TUNA_0185", "video_path": "VELOCITI/v_3qp3AeWmt38_seg_145_155.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is obscured by a dark object, making the image blurry. As the obstruction moves away, a group of people fighting in the snow comes into view. Two groups of people rush towards each other from the left and right sides of the screen, respectively. Among them, there are a few wolves in the group rushing from the left side of the screen to the right. Snowflakes scatter all over the snowy ground. Dark clouds loom in the sky.\nThe camera cuts and pans to the right, capturing a young blond man in a black coat running forward. The blond man grabs a black-clad person in front of him and then jumps up to kick forward.\nThe camera cuts to an overhead view. The black-clad person is thrown to the ground by the blond man. The blond man quickly gets up and kneels on the snowy ground.\nThe camera cuts to a close-up shot of the blond man's face.\nThe camera then cuts to a front view of a blond woman, who pulls the collar of a person opposite her with her right hand. As the camera pans down to the left, the blond woman forcefully pushes the person opposite her to the ground.\nThe camera cuts to a closer angle. The blond man throws the black-clad person to the left side of the screen and onto the ground.\nThe camera cuts, showing a yellow wolf rushing from the right side of the screen towards a black-clad person in the middle. The black-clad person manages to evade the yellow wolf's attack, and the yellow wolf exits from the left side of the screen. A gray wolf then rushes at the black-clad person from the right side and tackles him to the ground. In the background, many people fight.", "events": [ { "event": "At the beginning of the video, the camera is obscured by a dark object, making the image blurry. As the obstruction moves away, a group of people fighting in the snow comes into view. Two groups of people rush towards each other from the left and right sides of the screen, respectively. Among them, there are a few wolves in the group rushing from the left side of the screen to the right. Snowflakes scatter all over the snowy ground. 
Dark clouds loom in the sky.", "visual_elements": [ { "content": "The screen is obscured by a dark object, making the image blurry.", "type": "attribute", "weight": 3 }, { "content": "As the obstruction moves away, a group of people fighting in the snow comes into view.", "type": "attribute", "weight": 3 }, { "content": "Two groups of people rush towards each other from the left and right sides of the screen.", "type": "action", "weight": 3 }, { "content": "Among them, there are a few wolves in the group rushing from the left side of the screen to the right.", "type": "attribute", "weight": 2 }, { "content": "Snowflakes scatter all over the snowy ground.", "type": "attribute", "weight": 2 }, { "content": "Dark clouds loom in the sky.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts and pans to the right, capturing a young blond man in a black coat running forward. The blond man grabs a black-clad person in front of him and then jumps up to kick forward.", "visual_elements": [ { "content": "The camera cuts and pans to the right.", "type": "camera", "weight": 3 }, { "content": "A young blond man in a black coat is running forward.", "type": "action", "weight": 3 }, { "content": "The blond man grabs a black-clad person in front of him and then jumps up to kick forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to an overhead view. The black-clad person is thrown to the ground by the blond man. The blond man quickly gets up and kneels on the snowy ground.", "visual_elements": [ { "content": "The camera cuts to an overhead view.", "type": "camera", "weight": 3 }, { "content": "The black-clad person is thrown to the ground by the blond man.", "type": "action", "weight": 3 }, { "content": "The blond man quickly gets up and kneels on the snowy ground.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to a close-up shot of the blond man's face.", "visual_elements": [ { "content": "The camera cuts to a close-up shot of the blond man's face.", "type": "camera", "weight": 3 } ] }, { "event": "The camera then cuts to a front view of a blond woman, who pulls the collar of a person opposite her with her right hand. As the camera pans down to the left, the blond woman forcefully pushes the person opposite her to the ground.", "visual_elements": [ { "content": "The camera then cuts to a front view of a blond woman.", "type": "camera", "weight": 3 }, { "content": "The woman pulls the collar of a person opposite her with her right hand.", "type": "attribute", "weight": 2 }, { "content": "The camera pans down to the left.", "type": "camera", "weight": 3 }, { "content": "The blond woman forcefully pushes the person opposite her to the ground.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a closer angle. The blond man throws the black-clad person to the left side of the screen and onto the ground.", "visual_elements": [ { "content": "The camera cuts to a closer angle.", "type": "camera", "weight": 3 }, { "content": "The blond man throws the black-clad person to the left side of the screen and onto the ground.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts, showing a yellow wolf rushing from the right side of the screen towards a black-clad person in the middle. The black-clad person manages to evade the yellow wolf's attack, and the yellow wolf exits from the left side of the screen. A gray wolf then rushes at the black-clad person from the right side and tackles him to the ground. 
In the background, many people fight.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A yellow wolf rushes from the right side of the screen towards a black-clad person in the middle.", "type": "action", "weight": 3 }, { "content": "The black-clad person manages to evade the yellow wolf's attack.", "type": "action", "weight": 3 }, { "content": "The yellow wolf exits from the left side of the screen.", "type": "action", "weight": 2 }, { "content": "A gray wolf then rushes at the black-clad person from the right side and tackles him to the ground.", "type": "action", "weight": 3 }, { "content": "In the background, many people fight.", "type": "scene", "weight": 2 } ] } ], "n_events": 7, "n_elements": 25 }, { "index": "TUNA_0186", "video_path": "VELOCITI/v_3tXDymBcnJY_seg_115_125.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the front of an elderly male dressed in a blue shirt with a dark blue striped tie. The elderly male stares straight ahead with a blank expression. To the elderly male's right rear, there is a bald middle-aged male dressed in a dark suit and wearing glasses. The bald male first nods to his left, then smiles towards the camera. To the elderly male's left rear, there is a young male dressed in a dark suit jacket and a blue shirt. The young male faces the camera, nods with a smile, and then turns his head to his right.\nThe scene then shifts to a standing black male facing the camera. The black male wears a black suit and a white shirt with a four-colored striped tie, and a small American flag pin on his chest. The black male turns his head to his left. The background is a blue curtain.\nNext, the camera cuts to behind the black male with the pin. A black male wearing a black suit and a silver earring on his left ear spreads his arms wide and quickly walks towards the black male with the pin. They hug each other and pat each other's backs.\nFinally, the camera cuts. The two greet each other with unique handshake and hand-reaching gestures, and eventually both extend their right thumbs and index fingers pointing at each other. The black male with the pin drops his right hand, turns towards the camera, and smiles while looking ahead. The black male with the silver earring also turns towards the camera, smiles while looking ahead, and points both hands forward.", "events": [ { "event": "At the beginning of the video, the camera focuses on the front of an elderly male dressed in a blue shirt with a dark blue striped tie. The elderly male stares straight ahead with a blank expression. To the elderly male's right rear, there is a bald middle-aged male dressed in a dark suit and wearing glasses. The bald male first nods to his left, then smiles towards the camera. To the elderly male's left rear, there is a young male dressed in a dark suit jacket and a blue shirt. 
The young male faces the camera, nods with a smile, and then turns his head to his right.", "visual_elements": [ { "content": "The camera focuses on the front of an elderly male dressed in a blue shirt with a dark blue striped tie.", "type": "camera", "weight": 3 }, { "content": "The elderly male has a poker face as he stares straight ahead.", "type": "attribute", "weight": 3 }, { "content": "Behind him to the right, there stands a bald middle-aged male dressed in a dark suit and wearing glasses.", "type": "attribute", "weight": 3 }, { "content": "The bald male first nods to his left and then smiles towards the camera.", "type": "action", "weight": 3 }, { "content": "Behind him to the left, there is a young male dressed in a dark suit jacket and a blue shirt.", "type": "attribute", "weight": 3 }, { "content": "The young male faces the camera, nods with a smile, and then turns his head to his right.", "type": "action", "weight": 3 } ] }, { "event": "The scene then shifts to a standing black male facing the camera. The black male wears a black suit and a white shirt with a four-colored striped tie, and a small American flag pin on his chest. The black male turns his head to his left. The background is a blue curtain.", "visual_elements": [ { "content": "The camera cuts to the front of a standing black male.", "type": "camera", "weight": 3 }, { "content": "The black male wears a black suit and a white shirt, with a four-colored striped tie tied around his neck.", "type": "attribute", "weight": 3 }, { "content": "A small American flag pin is attached to the black male's chest.", "type": "attribute", "weight": 2 }, { "content": "The black male turns his head to his left.", "type": "action", "weight": 3 }, { "content": "The background is a blue curtain.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera cuts to behind the black male with the pin. A black male wearing a black suit and a silver earring on his left ear spreads his arms wide and quickly walks towards the black male with the pin. They hug each other and pat each other's backs.", "visual_elements": [ { "content": "The camera cuts to behind the black male wearing the pin.", "type": "camera", "weight": 3 }, { "content": "A black male wearing a black suit and a silver earring on his left ear stretches out his arms and quickly walks towards the black male wearing the pin.", "type": "action", "weight": 3 }, { "content": "The two men hug each other and pat each other's backs.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts. The two greet each other with unique handshake and hand-reaching gestures, and eventually both extend their right thumbs and index fingers pointing at each other. The black male with the pin drops his right hand, turns towards the camera, and smiles while looking ahead. 
The black male with the silver earring also turns towards the camera, smiles while looking ahead, and points both hands forward.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The two men greet each other with unique hand-clapping and hand-reaching gestures.", "type": "action", "weight": 3 }, { "content": "Finally, both men point at each other with their right thumbs and index fingers.", "type": "action", "weight": 3 }, { "content": "The black male wearing the pin lowers his right hand, turns towards the camera, and smiles while looking ahead.", "type": "action", "weight": 3 }, { "content": "The black male wearing the silver earring turns towards the camera, smiles while looking ahead, and points forward with both hands.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0187", "video_path": "VELOCITI/v_4HOgujwklBY_seg_50_60.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a long-haired male. He wears a gray undershirt and a dark jacket, and quickly runs towards the right side of the screen. The camera quickly rotates to the right, revealing the back of the long-haired male. Ahead of him, several policemen block his path. The long-haired male turns to his right. The camera quickly rotates to the left. Two policemen jog forward, running towards the right side of the screen.\nThe camera cuts to the left side of the long-haired male, where a white partition wall stands before him.\nThe camera cuts to a distant perspective. The long-haired male jumps onto the top of the white partition wall.\nThe camera cuts to the outside of the white partition wall, capturing the scene from a bottom-up angle. Standing atop the wall, the long-haired male stretches out his arms and leaps down. The exterior wall of the building features a brown wooden decorative style.\nThe camera cuts to a staircase, capturing the long-haired male from above as he lands on the stairs, facing the steps with his hands supporting him. Subsequently, he turns his head to look behind him to the right, then turns back. On the ground behind the male, a male policeman dressed in a black uniform runs towards the long-haired male.\nThe camera cuts to behind the long-haired male. The policeman chases him up the stairs. The camera cuts up to the left, and the policeman behind the long-haired male reaches out and grabs him.\nFinally, the camera cuts to a close-up view. The long-haired male's right hand is extended forward, and he is about to fall.", "events": [ { "event": "At the beginning of the video, the camera focuses on a long-haired male. He wears a gray undershirt and a dark jacket, and quickly runs towards the right side of the screen. The camera quickly rotates to the right, revealing the back of the long-haired male. Ahead of him, several policemen block his path. The long-haired male turns to his right. The camera quickly rotates to the left. 
Two policemen jog forward, running towards the right side of the screen.", "visual_elements": [ { "content": "The camera focuses on a long-haired male.", "type": "camera", "weight": 3 }, { "content": "The long-haired male wears a gray undershirt and a dark-colored jacket.", "type": "attribute", "weight": 3 }, { "content": "The long-haired male quickly runs to the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The camera quickly rotates to the right, revealing the back of the long-haired male.", "type": "camera", "weight": 3 }, { "content": "Ahead of the long-haired male, several policemen block his path.", "type": "attribute", "weight": 3 }, { "content": "The long-haired male turns to his right.", "type": "action", "weight": 3 }, { "content": "The camera quickly rotates to the left.", "type": "camera", "weight": 3 }, { "content": "Two policemen jog forward, running towards the right side of the screen.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the left side of the long-haired male, where a white partition wall stands before him.", "visual_elements": [ { "content": "The camera cuts to the left side of the long-haired male.", "type": "camera", "weight": 3 }, { "content": "In front of the long-haired male is a white partition wall.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a distant perspective. The long-haired male jumps onto the top of the white partition wall.", "visual_elements": [ { "content": "The camera cuts to a distant perspective.", "type": "camera", "weight": 3 }, { "content": "The long-haired male jumps onto the top of the white partition wall.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the outside of the white partition wall, capturing the scene from a bottom-up angle. Standing atop the wall, the long-haired male stretches out his arms and leaps down. The exterior wall of the building features a brown wooden decorative style.", "visual_elements": [ { "content": "The camera cuts to the outside of the white partition wall, capturing the scene from a bottom-up angle.", "type": "camera", "weight": 3 }, { "content": "Standing atop the wall, the long-haired male stretches out his arms and leaps down.", "type": "action", "weight": 3 }, { "content": "The exterior wall of the building features a brown wooden decor.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a staircase, capturing the long-haired male from above as he lands on the stairs, facing the steps with his hands supporting him. Subsequently, he turns his head to look behind him to the right, then turns back. On the ground behind the male, a male policeman dressed in a black uniform runs towards the long-haired male.", "visual_elements": [ { "content": "The camera cuts to a staircase, capturing the male with long hair from above.", "type": "camera", "weight": 3 }, { "content": "The long-haired male lands on the stairs, facing the steps with his hands supporting him.", "type": "action", "weight": 3 }, { "content": "The long-haired male turns his head to glance back at his right rear, then turns back.", "type": "action", "weight": 3 }, { "content": "On the ground behind the male, a male policeman in a black uniform runs towards him.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to behind the long-haired male. The policeman chases him up the stairs. 
The camera cuts up to the left, and the policeman behind the long-haired male reaches out and grabs him.", "visual_elements": [ { "content": "The camera cuts to behind the long-haired male.", "type": "camera", "weight": 3 }, { "content": "The policeman chases the long-haired male up the stairs.", "type": "action", "weight": 3 }, { "content": "The camera cuts up and to the left.", "type": "camera", "weight": 3 }, { "content": "The policeman behind the long-haired male reaches out and grabs him.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera cuts to a close-up view. The long-haired male's right hand is extended forward, and he is about to fall.", "visual_elements": [ { "content": "The camera cuts to a close-up view.", "type": "camera", "weight": 3 }, { "content": "The long-haired male extends his right hand forward, about to fall.", "type": "attribute", "weight": 2 } ] } ], "n_events": 7, "n_elements": 25 }, { "index": "TUNA_0188", "video_path": "VELOCITI/v_4QCMLXFfJyY_seg_140_150.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an elderly man wearing black-framed glasses, filmed from the man's right side. The man wears a blue-green suit, a white shirt, and a matching blue-green tie. He sits in a richly decorated room. On the table to his left, a lamp casts a soft glow. As he speaks, the man flips through a magazine, his head bowed. The background wall is adorned with intricate patterns.\nThe camera cuts to an elderly man wearing a gray suit jacket and a blue shirt, filmed from the man's left side. He leans against the red sofa back. On the wall to his right, two lantern-shaped lights emit a warm yellow glow. The man speaks while looking ahead. In his right hand, he holds a newspaper and a pen. He lifts his left hand, holding glasses, and puts them on.\nThe camera cuts once again to the man in the blue-green suit. He extends his right index finger, pointing forward, and then turns his head to the right, facing the camera.\nThe camera cuts to the left side of the man in the blue-green suit. The man in the blue-green suit is positioned at the right edge of the scene, with his face and body blurred. The man in the gray suit jacket stands to the right of the man in the blue-green suit, holding a newspaper in his left hand. The man in the gray suit jacket looks at the man in the blue-green suit and then turns his head to the right, gazing ahead.", "events": [ { "event": "At the beginning of the video, the camera focuses on an elderly man wearing black-framed glasses, filmed from the man's right side. The man wears a blue-green suit, a white shirt, and a matching blue-green tie. He sits in a richly decorated room. On the table to his left, a lamp casts a soft glow. As he speaks, the man flips through a magazine, his head bowed. 
The background wall is adorned with intricate patterns.", "visual_elements": [ { "content": "The camera focuses on an elderly man wearing black-framed glasses, filmed from his right side.", "type": "camera", "weight": 3 }, { "content": "The man wears a blue-green suit, a white shirt, and a matching blue-green tie.", "type": "attribute", "weight": 3 }, { "content": "The man sits in a richly decorated room.", "type": "scene", "weight": 2 }, { "content": "On the table to his left, a lamp casts a soft glow.", "type": "scene", "weight": 2 }, { "content": "As he speaks, the man flips through a magazine, his head bowed.", "type": "action", "weight": 3 }, { "content": "The background wall is adorned with intricate patterns.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to an elderly man wearing a gray suit jacket and a blue shirt, filmed from the man's left side. He leans against the red sofa back. On the wall to his right, two lantern-shaped lights emit a warm yellow glow. The man speaks while looking ahead. In his right hand, he holds a newspaper and a pen. He lifts his left hand, holding glasses, and puts them on.", "visual_elements": [ { "content": "The camera cuts to an elderly man wearing a gray suit jacket and a blue shirt, filmed from his left side.", "type": "camera", "weight": 3 }, { "content": "The man leans against the red sofa back.", "type": "attribute", "weight": 2 }, { "content": "On the wall to his right, two lantern-shaped lights emit a warm yellow glow.", "type": "scene", "weight": 2 }, { "content": "The man speaks while looking ahead.", "type": "action", "weight": 2 }, { "content": "The man holds a newspaper and a pen in his right hand.", "type": "attribute", "weight": 1 }, { "content": "The man lifts his left hand, holding glasses, and puts them on.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts once again to the man in the blue-green suit. He extends his right index finger, pointing forward, and then turns his head to the right, facing the camera.", "visual_elements": [ { "content": "The camera cuts once again to the man in the blue-green suit.", "type": "camera", "weight": 3 }, { "content": "The man extends his right index finger, pointing forward.", "type": "action", "weight": 3 }, { "content": "The man turns his head to the right, facing the camera.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the left side of the man in the blue-green suit. The man in the blue-green suit is positioned at the right edge of the scene, with his face and body blurred. The man in the gray suit jacket stands to the right of the man in the blue-green suit, holding a newspaper in his left hand. 
The man in the gray suit jacket looks at the man in the blue-green suit and then turns his head to the right, gazing ahead.", "visual_elements": [ { "content": "The camera cuts to the left side of the man in the blue-green suit.", "type": "camera", "weight": 3 }, { "content": "The man in the blue-green suit is positioned at the right edge of the scene.", "type": "attribute", "weight": 2 }, { "content": "The man in the blue-green suit has a blurred face and body.", "type": "attribute", "weight": 3 }, { "content": "The man in the gray suit jacket stands to the right of the man in the blue-green suit.", "type": "attribute", "weight": 3 }, { "content": "The man in the gray suit jacket holds a newspaper in his left hand.", "type": "attribute", "weight": 2 }, { "content": "The man in the gray suit jacket looks at the man in the blue-green suit.", "type": "attribute", "weight": 2 }, { "content": "The man in the gray suit jacket then turns his head to the right, gazing ahead.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 22 }, { "index": "TUNA_0189", "video_path": "VELOCITI/v_4Qrs43i_S50_seg_40_50.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a parked police car. The scene is set in a desolate area at night, with two red rotating lights atop the police car flashing simultaneously. On the left side of the scene, a man wearing a black coat stands on the side of the police car farthest from the camera. The camera pans to the left, capturing the man looking into the police car, waving his right hand, and speaking.\nNext, the camera cuts to the front of the police car and moves to the right. The man in the black coat, who is standing next to the police car, walks towards the right side of the scene. He then reaches out with both hands and presses down on a person kneeling in front of the car. The kneeling person wears a light-colored shirt and dark pants. The man in the light-colored shirt has his hands tied behind his back. The camera pans down and to the right, showing the man in the black coat forcing the man in the light-colored shirt to the ground. The camera pans to the left, capturing the man in the black coat bending down to grab the legs of the man in the light-colored shirt and flipping him over onto his back with force. The camera pans to the left, showing the man in the dark coat dragging the legs of the man in the light-colored shirt a short distance to the left side of the scene. Subsequently, the man in the black coat walks towards the right side of the scene.", "events": [ { "event": "At the beginning of the video, the camera focuses on a parked police car. The scene is set in a desolate area at night, with two red rotating lights atop the police car flashing simultaneously. On the left side of the scene, a man wearing a black coat stands on the side of the police car farthest from the camera. 
The camera pans to the left, capturing the man looking into the police car, waving his right hand, and speaking.", "visual_elements": [ { "content": "The camera focuses on a parked police car.", "type": "camera", "weight": 3 }, { "content": "The scene is set in a desolate area at night.", "type": "scene", "weight": 2 }, { "content": "Two red rotating lights atop the police car flash simultaneously.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the scene, a man wearing a black coat stands on the side of the police car farthest from the camera.", "type": "attribute", "weight": 3 }, { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The man looks into the police car, waves his right hand, and speaks.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to the front of the police car and moves to the right. The man in the black coat, who is standing next to the police car, walks towards the right side of the scene. He then reaches out with both hands and presses down on a person kneeling in front of the car. The kneeling person wears a light-colored shirt and dark pants. The man in the light-colored shirt has his hands tied behind his back. The camera pans down and to the right, showing the man in the black coat forcing the man in the light-colored shirt to the ground. The camera pans to the left, capturing the man in the black coat bending down to grab the legs of the man in the light-colored shirt and flipping him over onto his back with force. The camera pans to the left, showing the man in the dark coat dragging the legs of the man in the light-colored shirt a short distance to the left side of the scene. Subsequently, the man in the black coat walks towards the right side of the scene.", "visual_elements": [ { "content": "The camera cuts to the front of the police car and moves to the right.", "type": "camera", "weight": 3 }, { "content": "The man in the black coat, standing next to the police car, walks towards the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The man in the black coat reaches out with both hands and presses down on a person kneeling in front of the car.", "type": "action", "weight": 3 }, { "content": "The kneeling person wears a light-colored shirt and dark pants.", "type": "attribute", "weight": 3 }, { "content": "The man in the light-colored shirt has his hands tied behind his back.", "type": "attribute", "weight": 3 }, { "content": "The camera pans down and to the right.", "type": "camera", "weight": 3 }, { "content": "The man in the black coat forces the man in the light-colored shirt to the ground.", "type": "action", "weight": 3 }, { "content": "The camera pans up and to the left.", "type": "camera", "weight": 3 }, { "content": "The man in the black coat bends down to grab the legs of the man in the light-colored shirt and flips him over onto his back with force.", "type": "action", "weight": 3 }, { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The man in the black coat drags the legs of the man in the light-colored shirt a short distance to the left side of the scene.", "type": "action", "weight": 3 }, { "content": "The man in the black coat walks towards the right side of the scene.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 18 }, { "index": "TUNA_0190", "video_path": "VELOCITI/v_4Qrs43i_S50_seg_55_65.mp4", "video_source": "VELOCITI", "visual_characteristic": 
"High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera captures a shot from the right side of an adult man. The man wears a black coat and light-colored pants, standing upright. He looks down, grasping the leg of another person lying on the ground with his right hand, dragging them while walking to the right side of the screen.\nNext, the camera cuts and pans down to the right. A man dressed in an orange shirt and dark pants lies on the ground. The man in the black coat stands on the left side of the lying man, bending down to speak to him. The left hand of the man in the black coat is placed on the neck of the lying man.\nSubsequently, the camera cuts. The lying man is face down, crawling on the ground. The man in the black coat presses down on top of him. The man in the black coat's right hand holds a black mallet. He adjusts his position, sitting on top of the lying man.\nFinally, the camera cuts to a front view of the standing man in the black coat. He holds the black mallet in his left hand and points forward with his right hand.", "events": [ { "event": "At the beginning of the video, the camera captures a shot from the right side of an adult man. The man wears a black coat and light-colored pants, standing upright. He looks down, grasping the leg of another person lying on the ground with his right hand, dragging them while walking to the right side of the screen.", "visual_elements": [ { "content": "The camera captures a shot from the right side of an adult man.", "type": "camera", "weight": 3 }, { "content": "The man wears a black coat and light-colored pants, standing upright.", "type": "attribute", "weight": 3 }, { "content": "The man looks down, grasping the leg of another person lying on the ground with his right hand.", "type": "attribute", "weight": 3 }, { "content": "The man drags the other lying person while walking to the right side.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts and pans down to the right. A man dressed in an orange shirt and dark pants lies on the ground. The man in the black coat stands on the left side of the lying man, bending down to speak to him. The left hand of the man in the black coat is placed on the neck of the lying man.", "visual_elements": [ { "content": "The camera cuts and pans down to the right.", "type": "camera", "weight": 3 }, { "content": "A man dressed in an orange shirt and dark pants lies on the ground.", "type": "attribute", "weight": 3 }, { "content": "The man in black coat stands on the left side of the lying man.", "type": "attribute", "weight": 2 }, { "content": "The man in black coat bends down to speak to the lying man.", "type": "attribute", "weight": 3 }, { "content": "Left hand of man in black coat is placed on the neck of the lying man.", "type": "attribute", "weight": 3 } ] }, { "event": "Subsequently, the camera cuts. The lying man is face down, crawling on the ground. The man in the black coat presses down on top of him. The man in the black coat's right hand holds a black mallet. 
He adjusts his position, sitting on top of the lying man.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The lying man is face down, crawling on the ground.", "type": "attribute", "weight": 3 }, { "content": "The man in black coat presses down on top of him.", "type": "attribute", "weight": 3 }, { "content": "The man in black coat's right hand holds a black mallet.", "type": "attribute", "weight": 2 }, { "content": "The man in black coat adjusts his position, sitting on top of the lying man.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts to a front view of the standing man in the black coat. He holds the black mallet in his left hand and points forward with his right hand.", "visual_elements": [ { "content": "The camera cuts to a front view of the standing man in the black coat.", "type": "camera", "weight": 3 }, { "content": "The man in black coat holds the black mallet in his left hand and points forward with his right hand.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 16 }, { "index": "TUNA_0191", "video_path": "VELOCITI/v_4_9ZfH_x1hE_seg_75_85.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a young woman seated in the driver's seat of a car. The camera captures her upper torso from the left side. She wears a green camisole and a red jacket, accessorized with large hoop earrings and a gold necklace around her neck. The woman turns her body to the right, gazing ahead.\nThe camera cuts to a man standing outside the car. He has curly short hair and wears a dark-colored coat, with his arms crossed. His gaze is directed downwards, focusing on the area in front of him. Behind him, there is a brick wall, with some green plants visible beyond it.\nThe camera cuts back to the inside of the car. The woman unbuckles her seatbelt. Her left hand rests on the steering wheel, while her right hand opens the car door.\nThe camera cuts once again to outside the car. The woman exits the vehicle and walks towards the rear of the car, facing away from the camera. After she gets out, the man standing by the passenger door drops his arms and also walks towards the rear of the car. Eventually, the man reaches the driver's side door, while the woman arrives at the passenger side door. The car is colored in shades of red and dark green. On the right side of the vehicle, there are markings that read \"Ae School of Motoring\" and \"079460795\".", "events": [ { "event": "At the beginning of the video, the camera focuses on a young woman seated in the driver's seat of a car. The camera captures her upper torso from the left side. She wears a green camisole and a red jacket, accessorized with large hoop earrings and a gold necklace around her neck. 
The woman turns her body to the right, gazing ahead.", "visual_elements": [ { "content": "Shot of a young woman seated in the driver's seat of a car.", "type": "camera", "weight": 3 }, { "content": "The camera captures the woman's upper torso from the left side.", "type": "camera", "weight": 3 }, { "content": "The woman wears a green camisole and a red jacket.", "type": "attribute", "weight": 2 }, { "content": "The woman accessorized with large hoop earrings and a gold necklace.", "type": "attribute", "weight": 2 }, { "content": "The woman turns her body to the right, gazing ahead.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to a man standing outside the car. He has curly short hair and wears a dark-colored coat, with his arms crossed. His gaze is directed downwards, focusing on the area in front of him. Behind him, there is a brick wall, with some green plants visible beyond it.", "visual_elements": [ { "content": "The camera cuts to a man standing outside the car.", "type": "camera", "weight": 3 }, { "content": "The man has curly short hair.", "type": "attribute", "weight": 2 }, { "content": "The man wears a dark-colored coat", "type": "attribute", "weight": 2 }, { "content": "The man's gaze is directed downwards.", "type": "attribute", "weight": 2 }, { "content": "Behind the man, there is a brick wall with green plants visible.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts back to the inside of the car. The woman unbuckles her seatbelt. Her left hand rests on the steering wheel, while her right hand opens the car door.", "visual_elements": [ { "content": "The camera cuts back to the inside of the car.", "type": "camera", "weight": 3 }, { "content": "The woman unbuckles her seatbelt.", "type": "action", "weight": 3 }, { "content": "The woman's left hand rests on the steering wheel, right hand opens the car door.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts once again to outside the car. The woman exits the vehicle and walks towards the rear of the car, facing away from the camera. After she gets out, the man standing by the passenger door drops his arms and also walks towards the rear of the car. Eventually, the man reaches the driver's side door, while the woman arrives at the passenger side door. The car is colored in shades of red and dark green. 
On the right side of the vehicle, there are markings that read \"Ae School of Motoring\" and \"079460795\".", "visual_elements": [ { "content": "The camera cuts to outside the car.", "type": "camera", "weight": 3 }, { "content": "The woman exits the vehicle and walks towards the rear, facing away from the camera.", "type": "action", "weight": 3 }, { "content": "After the woman gets out, the man by the passenger door also walks towards the rear.", "type": "action", "weight": 3 }, { "content": "Eventually, the man reaches the driver's side door, the woman arrives at the passenger side door.", "type": "action", "weight": 3 }, { "content": "The car colored in shades of red and dark green.", "type": "attribute", "weight": 3 }, { "content": "Markings on the right side read \"Ae School of Motoring\" and \"079460795\".", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0192", "video_path": "VELOCITI/v_4ak8huhsVKc_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera shows an indoor scene. There is a row of windows on the left side of the room, with green plants and buildings visible outside. Inside, two dark-colored sofas are placed on the left and right sides of the scene, respectively. A brown solid wood coffee table is positioned between the sofas. On the sofa on the left side of the frame sits a naked young man, who holds a cup in his right hand and wears a watch on his right wrist. The man is facing sideways towards the camera, turning his head to the left to look at a young woman in front of him. The young woman, facing the camera, sits on a bar stool, wearing a light-colored T-shirt and pants. Her feet are bare, with dark-colored socks on. She holds a white cup in both hands and looks at the naked man. The woman shakes her head while speaking. The camera pans up, and a bald man wearing a black vest with white patterns enters from the right side of the frame, facing away from the camera. He walks to the back of the coffee table in front of the naked man. The bald man speaks to the naked man and raises his right hand, holding something. The naked man turns his head to look at the bald man in front of him.\nThe camera cuts to a shot of the bald man's front. The bald man holds a pair of black socks in his right hand. While speaking, he allows the black socks to fall naturally from his hand.\nFinally, the camera cuts to a shot of the naked man's front. The naked man looks at the bald man in front of him and speaks. A small piece of white fabric covers the naked man's left thigh. Sunlight shines through the windows, bringing bright light to the entire room.", "events": [ { "event": "At the beginning of the video, the camera shows an indoor scene. There is a row of windows on the left side of the room, with green plants and buildings visible outside. Inside, two dark-colored sofas are placed on the left and right sides of the scene, respectively. A brown solid wood coffee table is positioned between the sofas. On the sofa on the left side of the frame sits a naked young man, who holds a cup in his right hand and wears a watch on his right wrist. The man is facing sideways towards the camera, turning his head to the left to look at a young woman in front of him. The young woman, facing the camera, sits on a bar stool, wearing a light-colored T-shirt and pants. 
Her feet are bare, with dark-colored socks on. She holds a white cup in both hands and looks at the naked man. The woman shakes her head while speaking. The camera pans up, and a bald man wearing a black vest with white patterns enters from the right side of the frame, facing away from the camera. He walks to the back of the coffee table in front of the naked man. The bald man speaks to the naked man and raises his right hand, holding something. The naked man turns his head to look at the bald man in front of him.", "visual_elements": [ { "content": "The camera shows an indoor scene.", "type": "camera", "weight": 3 }, { "content": "There is a row of windows on the left side of the room, with green plants and buildings outside.", "type": "scene", "weight": 2 }, { "content": "Inside the room, two dark-colored sofas are placed on the left and right sides of the screen, respectively.", "type": "attribute", "weight": 2 }, { "content": "Between the sofas, there is a brown solid wood coffee table.", "type": "attribute", "weight": 2 }, { "content": "On the sofa on the left side of the screen, there sits a naked young man.", "type": "attribute", "weight": 3 }, { "content": "The man holds a cup in his right hand, and a watch is worn on his right wrist.", "type": "attribute", "weight": 2 }, { "content": "Facing slightly away from the camera, the man turns his head to the left to look at a woman with long hair in front of him.", "type": "attribute", "weight": 3 }, { "content": "The young woman faces the camera, sitting on a barstool in front of the counter.", "type": "attribute", "weight": 3 }, { "content": "The woman wears a light-colored T-shirt and pants, with her feet bare and covered in dark-colored socks.", "type": "attribute", "weight": 2 }, { "content": "The woman holds a white cup in both hands and looks at the naked man.", "type": "attribute", "weight": 3 }, { "content": "As the woman shakes her head, she speaks.", "type": "action", "weight": 3 }, { "content": "The camera pans upwards.", "type": "camera", "weight": 3 }, { "content": "A bald man wearing a black vest with white patterns enters from the right side of the screen, backing away from the camera, and walks to the back of the coffee table in front of the naked man.", "type": "action", "weight": 3 }, { "content": "Facing the naked man, the bald man speaks and raises his right hand, holding something.", "type": "action", "weight": 3 }, { "content": "The naked man turns his head to look at the bald man in front of him.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to a shot of the bald man's front. The bald man holds a pair of black socks in his right hand. While speaking, he allows the black socks to fall naturally from his hand.", "visual_elements": [ { "content": "The camera cuts to a front view of the bald man.", "type": "camera", "weight": 3 }, { "content": "The bald man holds a pair of black socks in his right hand.", "type": "attribute", "weight": 3 }, { "content": "The bald man lets the black socks fall naturally from his hand as he speaks,.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts to a shot of the naked man's front. The naked man looks at the bald man in front of him and speaks. A small piece of white fabric covers the naked man's left thigh. 
Sunlight shines through the windows, bringing bright light to the entire room.", "visual_elements": [ { "content": "The camera cuts to a front view of the naked man.", "type": "camera", "weight": 3 }, { "content": "The naked man looks at the bald man in front of him and speaks.", "type": "action", "weight": 3 }, { "content": "A small piece of white fabric covers the naked man's left thigh.", "type": "attribute", "weight": 1 }, { "content": "Sunlight streams in through the windows, brightening the entire room.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0193", "video_path": "VELOCITI/v_5FgtVXFRyTQ_seg_75_85.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on two men. The middle-aged man in a black suit occupies most of the right side of the screen. The man is wearing a white shirt and a brown patterned tie. The man's mouth corners are down, his brows are furrowed, and tears are gradually falling. On the left side of the screen is a young man wearing a hat, who is looking up at the man in a suit. The camera keeps a close-up shot of the two men's faces. The background is dark.\nThe camera switches to a scene in front of a door. The man opens the door and walks out while wiping his tears. The young man follows him and closes the door. The two stand in front of the door, one in front and one behind. The young man wears a gray hoodie, a yellow hat and black gloves. The young man takes out a red handkerchief with white polka dots from his pocket with his left hand. The young man hands the handkerchief to the middle-aged man. The middle-aged man pushes the young man's hand away and turns to speak to him seriously.", "events": [ { "event": "At the beginning of the video, the camera is focused on two men. The middle-aged man in a black suit occupies most of the right side of the screen. The man is wearing a white shirt and a brown patterned tie. The man's mouth corners are down, his brows are furrowed, and tears are gradually falling. On the left side of the screen is a young man wearing a hat, who is looking up at the man in a suit. The camera keeps a close-up shot of the two men's faces. The background is dark.", "visual_elements": [ { "content": "The camera is focused on two men.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man in a black suit occupies most of the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The man is wearing a white shirt and a brown patterned tie.", "type": "attribute", "weight": 2 }, { "content": "The man's mouth is down, his brows are furrowed, and tears are gradually falling.", "type": "action", "weight": 3 }, { "content": "On the left side of the screen is a young man wearing a hat.", "type": "attribute", "weight": 2 }, { "content": "The young man is looking up at the man in a suit.", "type": "attribute", "weight": 2 }, { "content": "The camera keeps a close-up shot of the two men's faces.", "type": "camera", "weight": 3 }, { "content": "The background is dark.", "type": "scene", "weight": 3 } ] }, { "event": "The camera switches to a scene in front of a door. The man opens the door and walks out while wiping his tears. The young man follows him and closes the door. The two stand in front of the door, one in front and one behind. 
The young man wears a gray hoodie, a yellow hat and black gloves. The young man takes out a red handkerchief with white polka dots from his pocket with his left hand. The young man hands the handkerchief to the middle-aged man. The middle-aged man pushes the young man's hand away and turns to speak to him seriously.", "visual_elements": [ { "content": "The camera switches to a scene in front of a door.", "type": "camera", "weight": 3 }, { "content": "The man in a suit opens the door and walks out while wiping his tears.", "type": "action", "weight": 3 }, { "content": "The young man follows him and closes the door.", "type": "action", "weight": 3 }, { "content": "The two stand in front of the door, one in front of the other.", "type": "attribute", "weight": 3 }, { "content": "The young man wears a gray hoodie, a yellow hat, and black gloves.", "type": "attribute", "weight": 2 }, { "content": "The young man takes out a red handkerchief with white dots from his pocket with his left hand.", "type": "action", "weight": 3 }, { "content": "The young man hands the handkerchief to the middle-aged man.", "type": "action", "weight": 3 }, { "content": "The middle-aged man pushes the young man's hand away and turns to speak to him seriously.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0194", "video_path": "VELOCITI/v_5RPUchAlppA_seg_80_90.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a middle-aged white man on the bed. The man is only wearing a pair of white patterned underwear. The man first pushes aside the quilt with his hands, then sits up, crosses his hands, spreads his legs, and sits on the edge of the bed. The sheets and pillows are white. The quilt on the bed is purple on the outside and white on the inside. The quilt is piled messily on the side. The background of the room is a classically decorated bedroom. There is a painting hanging on the wall behind the man, and there is a table lamp on the bedside table with soft light. The white curtains in the room are drawn, letting in a faint light.\nThe camera moves slightly to the left and then to the right as the man moves. After sitting up and thinking for a few seconds, the man reaches out to the lower left of the screen to pick up an old-fashioned telephone. Throughout the process, the man's expression is serious and his movements are hurried and panicked.", "events": [ { "event": "At the beginning of the video, the camera is focused on a middle-aged white man on the bed. The man is only wearing a pair of white patterned underwear. The man first pushes aside the quilt with his hands, then sits up, crosses his hands, spreads his legs, and sits on the edge of the bed. The sheets and pillows are white. The quilt on the bed is purple on the outside and white on the inside. The quilt is piled messily on the side. The background of the room is a classically decorated bedroom. There is a painting hanging on the wall behind the man, and there is a table lamp on the bedside table with soft light. 
The white curtains in the room are drawn, letting in a faint light.", "visual_elements": [ { "content": "The camera is focused on a middle-aged white man on the bed.", "type": "camera", "weight": 3 }, { "content": "The man is only wearing a pair of white patterned underwear.", "type": "attribute", "weight": 3 }, { "content": "The man first uses his hands to push aside the quilt, then sits up.", "type": "action", "weight": 3 }, { "content": "The man sits on the edge of the bed with his hands crossed and legs apart.", "type": "action", "weight": 2 }, { "content": "The sheets and pillows are white.", "type": "attribute", "weight": 2 }, { "content": "The quilt on the bed is purple on the outside and white on the inside.", "type": "attribute", "weight": 1 }, { "content": "The quilt is piled messily aside.", "type": "attribute", "weight": 1 }, { "content": "The background of the room is a bedroom with classical decoration.", "type": "scene", "weight": 3 }, { "content": "There is a painting hanging on the wall behind the man.", "type": "scene", "weight": 1 }, { "content": "There is a soft-light table lamp on the bedside table.", "type": "scene", "weight": 2 }, { "content": "The white curtains in the room are drawn, letting in a faint light.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves slightly to the left and then to the right as the man moves. After sitting up and thinking for a few seconds, the man reaches out to the lower left of the screen to pick up an old-fashioned telephone. Throughout the process, the man's expression is serious and his movements are hurried and panicked.", "visual_elements": [ { "content": "The camera moves slightly to the left and then to the right as the man moves.", "type": "camera", "weight": 3 }, { "content": "The man sits up and ponders for a few seconds, then reaches to the lower left of the screen to pick up an old-fashioned telephone.", "type": "action", "weight": 3 }, { "content": "The man's expression is serious, and his movements are hurried and panicked.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0195", "video_path": "VELOCITI/v_5XINVbpWRmw_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a beautifully decorated room. The walls of the room are white. There is a green carpet in the center of the room. An octagonal reddish-brown wooden coffee table is placed on the carpet. On the coffee table is a large white porcelain bowl with exquisite patterns and two small white plates. There is a white double door at the end of the room. Two rows of reddish-brown chairs are neatly arranged on both sides of the double door. A soft-lighting table lamp is placed on the table. On the right wall, from inside to outside, there is a built-in bookcase, a table clock, a white door and a mirror.\nThen, a middle-aged white man pushes open the door on the right side of the screen and slowly walks to the left front of the screen. The man is wearing an off-white suit, a black tie, and holding a black briefcase. The camera moves up slightly, and a luxurious white tassel chandelier on the white ceiling of the room enters the screen from above.\nFinally, the scene switches to a dark room. The camera focuses on a man and a woman sleeping on a bed. 
The pillows on the bed are white with yellow patterns.", "events": [ { "event": "At the beginning of the video, the camera is focused on a beautifully decorated room. The walls of the room are white. There is a green carpet in the center of the room. An octagonal reddish-brown wooden coffee table is placed on the carpet. On the coffee table is a large white porcelain bowl with exquisite patterns and two small white plates. There is a white double door at the end of the room. Two rows of reddish-brown chairs are neatly arranged on both sides of the double door. A soft-lighting table lamp is placed on the table. On the right wall, from inside to outside, there is a built-in bookcase, a table clock, a white door and a mirror.", "visual_elements": [ { "content": "The camera is pointed at a beautifully decorated room.", "type": "camera", "weight": 3 }, { "content": "The walls of the room are white.", "type": "attribute", "weight": 3 }, { "content": "There is a green carpet in the middle of the room.", "type": "attribute", "weight": 2 }, { "content": "There is an octagonal reddish brown wooden coffee table on the carpet.", "type": "attribute", "weight": 2 }, { "content": "There is a large white porcelain bowl with exquisite patterns and two small white plates on the coffee table.", "type": "attribute", "weight": 1 }, { "content": "There is a white double door at the end of the room.", "type": "scene", "weight": 2 }, { "content": "Two rows of reddish brown chairs are neatly arranged on both sides of the double door.", "type": "scene", "weight": 2 }, { "content": "There is a soft table lamp on the table.", "type": "scene", "weight": 1 }, { "content": "On the right wall, from inside to outside, there is a built-in bookcase, a clock, a white door and a mirror.", "type": "scene", "weight": 2 } ] }, { "event": "Then, a middle-aged white man pushes open the door on the right side of the screen and slowly walks to the left front of the screen. The man is wearing an off-white suit, a black tie, and holding a black briefcase. The camera moves up slightly, and a luxurious white tassel chandelier on the white ceiling of the room enters the screen from above.", "visual_elements": [ { "content": "Then, a middle-aged white man appears from the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The man pushes open the door on the right side of the screen and slowly walks to the left front of the screen.", "type": "action", "weight": 3 }, { "content": "The man wears an off-white suit, a black tie, and holds a black briefcase.", "type": "attribute", "weight": 3 }, { "content": "The camera moves slightly upward.", "type": "camera", "weight": 3 }, { "content": "A luxurious white tassel chandelier on the white ceiling of the room enters the screen from above.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the scene switches to a dark room. The camera focuses on a man and a woman sleeping on a bed. 
The pillows on the bed are white with yellow patterns.", "visual_elements": [ { "content": "Finally, the scene switches to a dimly lit room.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on a man and a woman sleeping on a bed.", "type": "camera", "weight": 3 }, { "content": "The pillows on the bed are white with yellow patterns.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0196", "video_path": "VELOCITI/v_5XINVbpWRmw_seg_65_75.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on two middle-aged men. One is a middle-aged white man in a light-colored suit. The white man stands next to a desk with a red button on it. The other is a middle-aged black man in a black suit. The black man lies on a black sofa in front of the white man with his hands crossed over his abdomen. The white man bends over and points his finger forward while talking, and the black man turns his head to look at the white man. Suddenly, a red light flashes in the room. The camera image also begins to flash. The background is an elegantly decorated room. There is a painting hanging on the wall behind the white man. There is a white table lamp and a green plant on the table on the right side of the screen. The curtain on the left side of the screen is green. There is a table in front of the curtains. There is a black briefcase on the table.\nThen, the camera pans to the left following the white man. The red light keeps flashing, and the white man turns around, picks up the briefcase on the table, and runs to the left of the screen. The black man keeps lying down and slightly raises his head. Then, the white man stands in front of the door on the left side of the screen, takes out the key from his trouser pocket, and sticks the key to a certain place on the right side of the door, and the door automatically moves to the left side of the screen. Outside the door stands a guard in a black and white uniform. The white man stands in front of the door and gestures to the black man outside the screen. Finally, the door automatically closes, interrupting the white man's speech. The video stops when the door is more than half closed. There are several paintings hanging on the door on the left side of the screen.", "events": [ { "event": "At the beginning of the video, the camera is focused on two middle-aged men. One is a middle-aged white man in a light-colored suit. The white man stands next to a desk with a red button on it. The other is a middle-aged black man in a black suit. The black man lies on a black sofa in front of the white man with his hands crossed over his abdomen. The white man bends over and points his finger forward while talking, and the black man turns his head to look at the white man. Suddenly, a red light flashes in the room. The camera image also begins to flash. The background is an elegantly decorated room. There is a painting hanging on the wall behind the white man. There is a white table lamp and a green plant on the table on the right side of the screen. The curtain on the left side of the screen is green. There is a table in front of the curtains. 
There is a black briefcase on the table.", "visual_elements": [ { "content": "The camera is pointed at two middle-aged men.", "type": "camera", "weight": 3 }, { "content": "One is a middle-aged white man in a light-colored suit.", "type": "attribute", "weight": 3 }, { "content": "The white man is standing next to a desk.", "type": "action", "weight": 3 }, { "content": "There is a red button on the desk.", "type": "attribute", "weight": 1 }, { "content": "The other is a middle-aged black man in a black suit.", "type": "attribute", "weight": 3 }, { "content": "The black man lies on a black sofa in front of the white man with his hands crossed over his abdomen.", "type": "action", "weight": 3 }, { "content": "The white man is bent over and pointing forward while talking.", "type": "action", "weight": 2 }, { "content": "The black man turns his head to look at the white man.", "type": "action", "weight": 1 }, { "content": "Suddenly, a red light flashes in the room.", "type": "scene", "weight": 3 }, { "content": "The camera image also starts to flash.", "type": "camera", "weight": 3 }, { "content": "The background is an elegantly decorated room.", "type": "scene", "weight": 3 }, { "content": "There is a painting on the wall behind the white man.", "type": "scene", "weight": 2 }, { "content": "There is a white desk lamp and a green plant on the table on the right side of the picture.", "type": "scene", "weight": 1 }, { "content": "The curtain on the left side of the picture is green.", "type": "scene", "weight": 1 }, { "content": "There is a table in front of the curtains.", "type": "scene", "weight": 1 }, { "content": "There is a black briefcase on the table.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera pans to the left following the white man. The red light keeps flashing, and the white man turns around, picks up the briefcase on the table, and runs to the left of the screen. The black man keeps lying down and slightly raises his head. Then, the white man stands in front of the door on the left side of the screen, takes out the key from his trouser pocket, and sticks the key to a certain place on the right side of the door, and the door automatically moves to the left side of the screen. Outside the door stands a guard in a black and white uniform. The white man stands in front of the door and gestures to the black man outside the screen. Finally, the door automatically closes, interrupting the white man's speech. The video stops when the door is more than half closed. 
There are several paintings hanging on the door on the left side of the screen.", "visual_elements": [ { "content": "Then, the camera pans to the left following the white man.", "type": "camera", "weight": 3 }, { "content": "The red light keeps flashing.", "type": "scene", "weight": 3 }, { "content": "The white man turns around, picks up the briefcase on the table and runs to the left of the screen.", "type": "action", "weight": 3 }, { "content": "The black man keeps lying down and slightly raises his head.", "type": "action", "weight": 1 }, { "content": "Then, the white man stands in front of the door on the left side of the screen, takes out the key from his trouser pocket and sticks it to a certain place on the right side of the door.", "type": "action", "weight": 3 }, { "content": "The door automatically moves to the left side of the screen.", "type": "action", "weight": 2 }, { "content": "A guard in a black and white uniform stands outside the door.", "type": "attribute", "weight": 1 }, { "content": "The white man stands in front of the door and gestures to the black man outside the screen.", "type": "action", "weight": 3 }, { "content": "Finally, the door closes automatically, interrupting the white man's speech.", "type": "action", "weight": 1 }, { "content": "The video stops when the door is more than half closed.", "type": "attribute", "weight": 3 }, { "content": "There are several paintings hanging on the door on the left side of the screen.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 27 }, { "index": "TUNA_0197", "video_path": "VELOCITI/v_5ZXyC0SDHNw_seg_120_130.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a character wearing a red coat, grey overalls, and white gloves. The character's head is decorated with white feathers, and the face has an orange beak and blue eyes. The character's tie is white with a red pattern on it. The character is sitting on the ground and talking, with his back against a brown counter. In the background is a counter with a black and white checkered decoration. There are black chair legs on the left side of the frame. There is a green kitchen cabinet next to the chair. There is a red cloth with white stripes hanging on the kitchen cabinet.\nThen, the camera switches to a further perspective. A woman appears on the left side of the screen. The woman has short blond hair and wears a pink long-sleeved top and a gray vest with a pattern. The woman squats next to the counter. The character in red bends over and slowly climbs up from under the counter, gesturing with both hands while talking. Bottles, various toys and decorations are placed on the black and white checkered counter on the left side of the screen. Various kitchen utensils are also placed on the kitchen cabinet behind the left side of the screen.", "events": [ { "event": "The video begins with the camera focusing on a character wearing a red coat, grey overalls, and white gloves. The character's head is decorated with white feathers, and the face has an orange beak and blue eyes. The character's tie is white with a red pattern on it. The character is sitting on the ground and talking, with his back against a brown counter. In the background is a counter with a black and white checkered decoration. There are black chair legs on the left side of the frame. 
There is a green kitchen cabinet next to the chair. There is a red cloth with white stripes hanging on the kitchen cabinet.", "visual_elements": [ { "content": "The camera is focused on a character wearing a red coat, grey overalls and white gloves.", "type": "camera", "weight": 3 }, { "content": "The character's head is decorated with white feathers.", "type": "attribute", "weight": 3 }, { "content": "The face has an orange beak and blue eyes.", "type": "attribute", "weight": 3 }, { "content": "The character's tie is white with a red pattern on it.", "type": "attribute", "weight": 2 }, { "content": "The character is sitting on the ground and talking.", "type": "action", "weight": 3 }, { "content": "The character is leaning against a brown counter.", "type": "attribute", "weight": 2 }, { "content": "The background is a counter with black and white checkered decoration.", "type": "scene", "weight": 3 }, { "content": "There are black chair legs on the left side of the frame.", "type": "scene", "weight": 1 }, { "content": "There is a green kitchen cabinet next to the chair.", "type": "scene", "weight": 1 }, { "content": "There is a red cloth with white stripes hanging on the kitchen cabinet.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to a further perspective. A woman appears on the left side of the screen. The woman has short blond hair and wears a pink long-sleeved top and a gray vest with a pattern. The woman squats next to the counter. The character in red bends over and slowly climbs up from under the counter, gesturing with both hands while talking. Bottles, various toys and decorations are placed on the black and white checkered counter on the left side of the screen. Various kitchen utensils are also placed on the kitchen cabinet behind the left side of the screen.", "visual_elements": [ { "content": "Then, the camera switches to a further perspective.", "type": "camera", "weight": 3 }, { "content": "A woman appears on the left side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The woman has short blond hair, and wears a pink long-sleeved top and a gray vest with patterns.", "type": "attribute", "weight": 2 }, { "content": "The character in red bends over and slowly climbs up from under the counter.", "type": "action", "weight": 3 }, { "content": "Then, the character in red gestures with both hands while talking.", "type": "action", "weight": 3 }, { "content": "On the black and white checkered counter on the left side of the screen, there are bottles, various toys and decorations.", "type": "scene", "weight": 2 }, { "content": "There are also various kitchen utensils on the kitchen cabinet behind the left side of the screen.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0198", "video_path": "VELOCITI/v_5ZXyC0SDHNw_seg_35_45.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a young white woman in an indoor room. The woman is wearing a pink long-sleeved dance dress with a patterned overalls skirt on the outside, and there are two pink round decorations on the skirt. The woman's short blond hair is relatively fluffy. The woman wears a pink bow hair accessory on her head. The woman stands in a dim room and puts her right hand on her chest while talking. 
There are some green and pink neon lights in the background. The walls and ceiling of the room are made of wood.\nThen, the camera pans left with the woman, and the camera moves to a new scene. The woman walks to the left side of the screen while pointing with her finger. There is a silver metal shelf on the left side of the screen. Some kitchen utensils and sundries are placed on the metal shelf. The woman reaches out and picks up a blue bowl and takes a closer look. Then the woman turns around and hands the bowl to the lower right of the camera. There are various decorations hanging on the wall behind the shelf. There is a white shelf with black and white grid on the right side of the screen.\nFinally, the camera switches to a new scene, focusing on a character at a dining table. The character looks strange. The character is wearing a red shirt, a red vest, and a white tie with a red and blue pattern. The character's head is wrapped in white feathers, and the face has an orange beak and blue eyes. The character has a gray plaid coat on his right hand. There is a yellow dining table in the background. There are some tableware and food on the table. It is raining outside the window, and there are traces of water droplets on the glass.", "events": [ { "event": "At the beginning of the video, the camera focuses on a young white woman in an indoor room. The woman is wearing a pink long-sleeved dance dress with a patterned overalls skirt on the outside, and there are two pink round decorations on the skirt. The woman's short blond hair is relatively fluffy. The woman wears a pink bow hair accessory on her head. The woman stands in a dim room and puts her right hand on her chest while talking. There are some green and pink neon lights in the background. The walls and ceiling of the room are made of wood.", "visual_elements": [ { "content": "The camera focuses on a young white woman in an indoor room.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a pink long-sleeved shirt with a patterned suspender skirt on the outside.", "type": "attribute", "weight": 3 }, { "content": "There are two pink round decorations on the suspender skirt.", "type": "attribute", "weight": 1 }, { "content": "The woman's short blond hair is relatively fluffy.", "type": "attribute", "weight": 2 }, { "content": "The woman wears a pink bow hair ornament on her head.", "type": "attribute", "weight": 1 }, { "content": "The woman stands in a dim room and puts her right hand on her chest while talking.", "type": "action", "weight": 3 }, { "content": "There are some green and pink neon lights in the background.", "type": "scene", "weight": 3 }, { "content": "The walls and ceiling of the room are made of wood.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera pans left with the woman, and the camera moves to a new scene. The woman walks to the left side of the screen while pointing with her finger. There is a silver metal shelf on the left side of the screen. Some kitchen utensils and sundries are placed on the metal shelf. The woman reaches out and picks up a blue bowl and takes a closer look. Then the woman turns around and hands the bowl to the lower right of the camera. There are various decorations hanging on the wall behind the shelf. 
There is a white shelf with black and white grid on the right side of the screen.", "visual_elements": [ { "content": "Then, the camera pans left with the woman.", "type": "camera", "weight": 3 }, { "content": "The camera moves to a new scene.", "type": "camera", "weight": 3 }, { "content": "The woman walks to the left side of the screen while pointing with her finger.", "type": "action", "weight": 3 }, { "content": "There is a silver metal shelf on the left side of the screen.", "type": "scene", "weight": 3 }, { "content": "Some kitchen utensils and sundries are placed on the metal shelf.", "type": "scene", "weight": 2 }, { "content": "The woman reaches out and picks up a blue bowl and takes a closer look.", "type": "action", "weight": 3 }, { "content": "Then the woman turns around and hands the bowl to the lower right of the camera.", "type": "action", "weight": 2 }, { "content": "There are various decorations hanging on the wall behind the shelf.", "type": "scene", "weight": 1 }, { "content": "There is a white shelf with black and white grid on the right side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera switches to a new scene, focusing on a character at a dining table. The character looks strange. The character is wearing a red shirt, a red vest, and a white tie with a red and blue pattern. The character's head is wrapped in white feathers, and the face has an orange beak and blue eyes. The character has a gray plaid coat on his right hand. There is a yellow dining table in the background. There are some tableware and food on the table. It is raining outside the window, and there are traces of water droplets on the glass.", "visual_elements": [ { "content": "Finally, the camera switches to a new scene.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on a character in front of a dining table.", "type": "camera", "weight": 3 }, { "content": "The character looks strange.", "type": "attribute", "weight": 3 }, { "content": "The character wears a red shirt, a red vest, and a white tie with a red and blue pattern.", "type": "attribute", "weight": 2 }, { "content": "The character's head is wrapped in white feathers.", "type": "attribute", "weight": 2 }, { "content": "The character's face has an orange beak and blue eyes.", "type": "attribute", "weight": 2 }, { "content": "The character has a gray plaid coat on his right hand.", "type": "action", "weight": 3 }, { "content": "There is a yellow dining table in the background.", "type": "scene", "weight": 3 }, { "content": "Some tableware and food are placed on the table.", "type": "scene", "weight": 2 }, { "content": "It is raining outside the window, and there are traces of water droplets on the glass.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 27 }, { "index": "TUNA_0199", "video_path": "VELOCITI/v_5ZXyC0SDHNw_seg_5_15.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is pointed at a dim room. There is a pink neon light on the ceiling of the room. On the right side of the screen, a young white woman turns on the lights in the room. The woman is wearing a red coat with black patterns on the sleeves. The woman's short blond hair is fluffy. The woman wears a pink headdress on her head. 
The woman carries a black instrument bag on her left shoulder and a black bag on her right shoulder.\nThe camera slowly moves to the lower right and forward. A strange-looking character slowly enters the screen from the right side of the screen. The character is wearing a red shirt with a pink vest on the outside. The character's head is wrapped in white feathers and has an orange beak on his face. The woman faces the strange-looking character and speaks to it, first putting down the red umbrella in her left hand, and then putting down the two backpacks on her shoulders. The background of the room is very messy, filled with various furniture and decorations. There is a sofa on the left side of the room, with a red and yellow blanket on it, and a small table next to it with a table lamp on it. There are some decorative paintings and posters hanging on the wall on the left side of the screen. Some natural light comes in from a window on the right side of the room, illuminating part of the room. In front of the window is a round table with tableware on it. There are several yellow chairs around the round table.\nFinally, the camera moves to the left. The woman walks into the room.", "events": [ { "event": "At the beginning of the video, the camera is pointed at a dim room. There is a pink neon light on the ceiling of the room. On the right side of the screen, a young white woman turns on the lights in the room. The woman is wearing a red coat with black patterns on the sleeves. The woman's short blond hair is fluffy. The woman wears a pink headdress on her head. The woman carries a black instrument bag on her left shoulder and a black bag on her right shoulder.", "visual_elements": [ { "content": "The camera is pointed at a dim room.", "type": "camera", "weight": 3 }, { "content": "There is a pink neon light on the ceiling of the room.", "type": "scene", "weight": 3 }, { "content": "On the right side of the picture, a woman turns on the light in the room.", "type": "action", "weight": 3 }, { "content": "The woman is wearing a red coat with black patterns on the sleeves.", "type": "attribute", "weight": 3 }, { "content": "The woman has fluffy short blond hair.", "type": "attribute", "weight": 2 }, { "content": "The woman is wearing a pink headdress on her head.", "type": "attribute", "weight": 1 }, { "content": "The woman is carrying a black instrument bag on her left shoulder and a black bag on her right shoulder.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera slowly moves to the lower right and forward. A strange-looking character slowly enters the screen from the right side of the screen. The character is wearing a red shirt with a pink vest on the outside. The character's head is wrapped in white feathers and has an orange beak on his face. The woman faces the strange-looking character and speaks to it, first putting down the red umbrella in her left hand, and then putting down the two backpacks on her shoulders. The background of the room is very messy, filled with various furniture and decorations. There is a sofa on the left side of the room, with a red and yellow blanket on it, and a small table next to it with a table lamp on it. There are some decorative paintings and posters hanging on the wall on the left side of the screen. Some natural light comes in from a window on the right side of the room, illuminating part of the room. In front of the window is a round table with tableware on it. 
There are several yellow chairs around the round table.", "visual_elements": [ { "content": "The camera slowly moves to the lower right and forward.", "type": "camera", "weight": 3 }, { "content": "A strange-looking character slowly enters the screen from the right side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The character wears a red shirt with a pink vest on the outside.", "type": "attribute", "weight": 2 }, { "content": "The character's head is wrapped in white feathers.", "type": "attribute", "weight": 2 }, { "content": "The character has an orange beak on his face.", "type": "attribute", "weight": 1 }, { "content": "The woman faces the strange-looking character and talks to it.", "type": "attribute", "weight": 3 }, { "content": "The woman first puts down the red umbrella in her left hand, and then puts down the two backpacks on her shoulders.", "type": "action", "weight": 3 }, { "content": "The background of the room is very messy, full of various furniture and decorations.", "type": "scene", "weight": 2 }, { "content": "There is a sofa on the left side of the screen in the room.", "type": "scene", "weight": 2 }, { "content": "The sofa is covered with a red and yellow blanket.", "type": "scene", "weight": 1 }, { "content": "There is also a small table next to the sofa.", "type": "scene", "weight": 1 }, { "content": "There is a table lamp on the table.", "type": "scene", "weight": 1 }, { "content": "There are some decorative paintings and posters on the wall on the left side of the screen.", "type": "scene", "weight": 1 }, { "content": "Some natural light comes in from a window on the right side of the screen in the room, illuminating part of the room.", "type": "scene", "weight": 2 }, { "content": "In front of the window is a round table.", "type": "scene", "weight": 2 }, { "content": "There is tableware on the table.", "type": "scene", "weight": 1 }, { "content": "There are several yellow chairs around the round table.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera moves to the left. The woman walks into the room.", "visual_elements": [ { "content": "Finally, the camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The woman walks into the room.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 26 }, { "index": "TUNA_0200", "video_path": "VELOCITI/v_5ZXyC0SDHNw_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, there are two main characters in the video. A young white woman in a pink top is standing on the right side of the screen. The woman is holding two bottles of beer. On the left side of the screen, opposite the woman, there is a character wearing a red shirt and a pink vest. The character looks strange, with white feathers covering his head and hands and an orange beak on his face. The strange-looking character is holding a white duck doll in his hands. The doll has a yellow decoration on its neck and a black pattern on its body. The background is an indoor environment with some furniture and decorations. There is a silver metal shelf on the right side of the screen with some sundries on it. There is a lit lamp hanging from the ceiling on the left side of the screen.\nThe camera then switches to a closer view, focusing on the strange-looking character's face and the woman's movements. 
The woman bends down, takes the doll from the strange-looking character's hand, and hands the character a bottle of wine.\nThen, the camera switches to a close-up of the strange-looking character. The character's eyes are fixed on the bottle in his right hand, ready to drink. There is a square yellow table in the background. The table is set with tableware and food. A red coat with black and white sleeves is hung on the chair in front of the table.\nThen, the camera switches to a close-up of the woman. There is a pink ornament on the woman's head. The woman is standing and talking to the lower left side of the screen. In the background, there is a green light on the right side of the screen. There is a blue light hanging from the ceiling on the left side of the screen.\nFinally, the camera switches to the strange-looking character again, and then immediately switches to the woman. The strange-looking character puts down the bottle and turns his head to look in front of the screen. The woman looks to the right of the screen.", "events": [ { "event": "At the beginning of the video, there are two main characters in the video. A young white woman in a pink top is standing on the right side of the screen. The woman is holding two bottles of beer. On the left side of the screen, opposite the woman, there is a character wearing a red shirt and a pink vest. The character looks strange, with white feathers covering his head and hands and an orange beak on his face. The strange-looking character is holding a white duck doll in his hands. The doll has a yellow decoration on its neck and a black pattern on its body. The background is an indoor environment with some furniture and decorations. There is a silver metal shelf on the right side of the screen with some sundries on it. There is a lit lamp hanging from the ceiling on the left side of the screen.", "visual_elements": [ { "content": "There are two main characters in the video.", "type": "attribute", "weight": 3 }, { "content": "A young white woman in a pink top stands on the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The woman holds two bottles of beer.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the screen, opposite the woman sits a character wearing a red shirt and a pink vest.", "type": "attribute", "weight": 3 }, { "content": "The character looks strange, with white feathers covering his head and hands.", "type": "attribute", "weight": 2 }, { "content": "The character has an orange beak on his face.", "type": "attribute", "weight": 2 }, { "content": "The strange-looking character holds a white duck doll in his hand.", "type": "attribute", "weight": 2 }, { "content": "The doll has a yellow decoration on its neck and a black pattern on its body.", "type": "attribute", "weight": 1 }, { "content": "The background is an indoor environment with some furniture and decorations.", "type": "scene", "weight": 3 }, { "content": "There is a silver metal shelf on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "There are some sundries on the metal shelf.", "type": "scene", "weight": 1 }, { "content": "A light is hanging from the ceiling on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "The camera then switches to a closer view, focusing on the strange-looking character's face and the woman's movements. 
The woman bends down, takes the doll from the strange-looking character's hand, and hands the character a bottle of wine.", "visual_elements": [ { "content": "Then, the camera switches to a closer view.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the face of the strange-looking character and the woman's movements.", "type": "camera", "weight": 3 }, { "content": "The woman bends down and takes the doll from the strange-looking character.", "type": "action", "weight": 3 }, { "content": "The woman hands the strange-looking character a bottle of wine.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera switches to a close-up of the strange-looking character. The character's eyes are fixed on the bottle in his right hand, ready to drink. There is a square yellow table in the background. The table is set with tableware and food. A red coat with black and white sleeves is hung on the chair in front of the table.", "visual_elements": [ { "content": "Then, the camera switches to a close-up of the strange-looking character.", "type": "camera", "weight": 3 }, { "content": "The character's eyes are fixed on the bottle in his right hand, ready to drink.", "type": "action", "weight": 3 }, { "content": "There is a square yellow table in the background.", "type": "scene", "weight": 2 }, { "content": "The table is laid with tableware and food.", "type": "scene", "weight": 1 }, { "content": "A red coat with black and white sleeves hangs on the chair in front of the table.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to a close-up of the woman. There is a pink ornament on the woman's head. The woman is standing and talking to the lower left side of the screen. In the background, there is a green light on the right side of the screen. There is a blue light hanging from the ceiling on the left side of the screen.", "visual_elements": [ { "content": "Then, the camera switches to a close-up of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman has a pink ornament on her head.", "type": "attribute", "weight": 1 }, { "content": "The woman stands and speaks facing the lower left of the screen.", "type": "action", "weight": 3 }, { "content": "In the background, there is a green light on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "A blue light hangs from the ceiling on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera switches to the strange-looking character again, and then immediately switches to the woman. The strange-looking character puts down the bottle and turns his head to look in front of the screen. 
The woman looks to the right of the screen.", "visual_elements": [ { "content": "The strange-looking character puts down the bottle and turns his head to look at the front of the screen.", "type": "action", "weight": 2 }, { "content": "The camera switches to the strange-looking character again.", "type": "camera", "weight": 3 }, { "content": "Then, the camera immediately switches to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman looks to the right side of the screen.", "type": "action", "weight": 1 } ] } ], "n_events": 5, "n_elements": 30 }, { "index": "TUNA_0201", "video_path": "VELOCITI/v_5ZXyC0SDHNw_seg_95_105.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "There are two main characters in the video. On the right side of the screen, there is a young white woman wearing a pink top and black overalls with patterns. The woman is standing next to a green chair on the right side of the screen. The woman holds a bottle of beer in her left hand and takes the newspaper off the green chair with her right hand. On the left side of the screen, there is a character, seen from behind, wearing a red shirt, a pink vest, and gray overalls. The character looks strange, with white feathers wrapped around his head and hands. The strange-looking character is walking towards the green chair. The camera follows the character forward. The background is a room with complicated decorations. The walls of the room are covered with various items and photos. There is a kitchen area on the left side of the screen. There are some kitchen utensils and decorations on the white counter. In the middle of the screen, there is an old-fashioned small TV and a table lamp. Next to the TV is a table with black and white checkered corners. There are also some sundries on the table. The strange-looking character climbs onto the green chair, then turns around and sits down. The woman on the right side of the screen sits down and drinks the wine in her hand.\nFinally, the camera moves to the left and gradually focuses on the strange-looking character. Suddenly, the room starts to shake. The strange-looking character sways from side to side and immediately holds on to the armrest of the chair.", "events": [ { "event": "There are two main characters in the video. On the right side of the screen, there is a young white woman wearing a pink top and black overalls with patterns. The woman is standing next to a green chair on the right side of the screen. The woman holds a bottle of beer in her left hand and takes the newspaper off the green chair with her right hand. On the left side of the screen, there is a character, seen from behind, wearing a red shirt, a pink vest, and gray overalls. The character looks strange, with white feathers wrapped around his head and hands. The strange-looking character is walking towards the green chair. The camera follows the character forward. The background is a room with complicated decorations. The walls of the room are covered with various items and photos. There is a kitchen area on the left side of the screen. There are some kitchen utensils and decorations on the white counter. In the middle of the screen, there is an old-fashioned small TV and a table lamp. Next to the TV is a table with black and white checkered corners. There are also some sundries on the table. 
The strange-looking character climbs onto the green chair, then turns around and sits down. The woman on the right side of the screen sits down and drinks the wine in her hand.", "visual_elements": [ { "content": "There are two main characters in the video.", "type": "attribute", "weight": 3 }, { "content": "On the right side of the screen, there is a young white woman wearing a pink top and black overalls with patterns.", "type": "attribute", "weight": 3 }, { "content": "The woman is standing next to a green chair on the right side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The woman holds a bottle of beer in her left hand and takes the newspaper off the green chair with her right hand.", "type": "action", "weight": 3 }, { "content": "On the left side of the screen, there is a character, seen from behind, wearing a red shirt, pink vest, and gray overalls.", "type": "attribute", "weight": 3 }, { "content": "The character looks strange, with his head and hands wrapped in white feathers.", "type": "attribute", "weight": 3 }, { "content": "The strange-looking character is walking towards the green chair.", "type": "action", "weight": 3 }, { "content": "The camera follows the character forward.", "type": "camera", "weight": 3 }, { "content": "The background is a room with complicated decorations.", "type": "scene", "weight": 3 }, { "content": "The walls of the room are covered with various items and photos.", "type": "scene", "weight": 2 }, { "content": "There is a kitchen area on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "There are some kitchen utensils and decorations on the white counter.", "type": "scene", "weight": 1 }, { "content": "In the middle of the screen, there is an old-fashioned small TV and a table lamp.", "type": "scene", "weight": 2 }, { "content": "Next to the TV, there is a table with black and white checkered corners.", "type": "scene", "weight": 2 }, { "content": "There are also some sundries on the table.", "type": "scene", "weight": 1 }, { "content": "The strange-looking character climbs onto the green chair, then turns around and sits down.", "type": "action", "weight": 3 }, { "content": "The woman on the right side of the screen sits down and drinks the wine in her hand.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera moves to the left and gradually focuses on the strange-looking character. Suddenly, the room starts to shake. The strange-looking character sways from side to side and immediately holds on to the armrest of the chair.", "visual_elements": [ { "content": "Finally, the camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The camera gradually focuses on the strange-looking character.", "type": "camera", "weight": 3 }, { "content": "Suddenly, the room starts to shake.", "type": "scene", "weight": 3 }, { "content": "The strange-looking character sways from side to side and immediately holds on to the armrest of the chair.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 21 }, { "index": "TUNA_0202", "video_path": "VELOCITI/v_5ke6m-Y8DHE_seg_15_25.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is pointed at a hospital corridor. In the foreground, there is a middle-aged white man wearing a blue shirt with white flowers. 
The man walks out of the screen from the right side of the screen. Then, the camera focuses on two white women behind the man. On the right side of the screen is a young woman with blond hair and a sleeveless light-colored top with black stripes. Behind the woman is a little girl wearing a purple T-shirt with white stripes. The two are walking slowly. In the background, on the left side of the screen, a doctor in a white coat and a patient in a blue patterned top are talking at the counter. Behind the nurses' station, there is a male staff member in a blue nurse's uniform with his back to the camera. The background wall is blue, with some medical-related decorations and equipment on it.\nThen, the camera switches to the scene where the middle-aged man in a blue shirt with white flowers enters the ward, turns around, closes the door, and pulls the blue and white striped curtain. On the left side of the screen hangs a three-layer gray shelf with white medicine boxes on it. On the wall behind the man hangs a whiteboard with the patient's information and emergency contacts written on it. On the right side of the screen is a medical device.\nFinally, the camera zooms out. A hospital bed appears on the right side of the screen. A middle-aged white female patient lies on the bed, with various medical devices plugged into her body and some monitoring equipment next to her. A beige blanket with yellow patterns covers the patient's legs, and a brown stuffed toy is placed next to the bed. The middle-aged man on the left side of the screen stands in front of the curtain, looking at the patient. There is a corner of a window on the right side of the screen.", "events": [ { "event": "At the beginning of the video, the camera is pointed at a hospital corridor. In the foreground, there is a middle-aged white man wearing a blue shirt with white flowers. The man walks out of the screen from the right side of the screen. Then, the camera focuses on two white women behind the man. On the right side of the screen is a young woman with blond hair and a sleeveless light-colored top with black stripes. Behind the woman is a little girl wearing a purple T-shirt with white stripes. The two are walking slowly. In the background, on the left side of the screen, a doctor in a white coat and a patient in a blue patterned top are talking at the counter. Behind the nurses' station, there is a male staff member in a blue nurse's uniform with his back to the camera. 
The background wall is blue, with some medical-related decorations and equipment on it.", "visual_elements": [ { "content": "The camera is pointed at a hospital corridor.", "type": "camera", "weight": 3 }, { "content": "There is a middle-aged white man in a blue shirt with white flowers in the foreground.", "type": "attribute", "weight": 2 }, { "content": "The man walks out of the picture from the right side of the screen.", "type": "action", "weight": 2 }, { "content": "Then, the camera focuses on the two white women behind the man.", "type": "camera", "weight": 3 }, { "content": "On the right side of the screen is a young woman with blond hair and a sleeveless light-colored top with black stripes.", "type": "attribute", "weight": 3 }, { "content": "Behind the woman is a little girl wearing a purple T-shirt with white stripes.", "type": "attribute", "weight": 3 }, { "content": "The two are walking slowly.", "type": "action", "weight": 3 }, { "content": "In the background, on the left side of the screen, a doctor in a white coat and a patient in a blue patterned top are talking at the counter.", "type": "scene", "weight": 2 }, { "content": "Behind the nurse station, there is a male staff member in a blue nurse uniform with his back to the camera.", "type": "scene", "weight": 1 }, { "content": "The background wall is blue.", "type": "scene", "weight": 2 }, { "content": "There are some medical-related decorations and equipment on the background wall.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to the scene where the middle-aged man in a blue shirt with white flowers enters the ward, turns around, closes the door, and pulls the blue and white striped curtain. On the left side of the screen hangs a three-layer gray shelf with white medicine boxes on it. On the wall behind the man hangs a whiteboard with the patient's information and emergency contacts written on it. On the right side of the screen is a medical device.", "visual_elements": [ { "content": "Then, the camera switches to the scene where the middle-aged man in a blue shirt with white flowers enters the ward.", "type": "camera", "weight": 3 }, { "content": "The man turns around and closes the door and pulls the blue and white striped curtain.", "type": "action", "weight": 3 }, { "content": "On the left side of the screen, there is a three-layer gray shelf.", "type": "scene", "weight": 2 }, { "content": "The shelf contains white medicine boxes.", "type": "scene", "weight": 1 }, { "content": "There is also a white board on the wall behind the man.", "type": "scene", "weight": 2 }, { "content": "The patient's information and emergency contacts are written on the white board.", "type": "scene", "weight": 1 }, { "content": "There is a medical equipment on the right side of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera zooms out. A hospital bed appears on the right side of the screen. A middle-aged white female patient lies on the bed, with various medical devices plugged into her body and some monitoring equipment next to her. A beige blanket with yellow patterns covers the patient's legs, and a brown stuffed toy is placed next to the bed. The middle-aged man on the left side of the screen stands in front of the curtain, looking at the patient. 
There is a corner of a window on the right side of the screen.", "visual_elements": [ { "content": "Finally, the camera zooms out.", "type": "camera", "weight": 3 }, { "content": "A hospital bed appears on the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "A middle-aged white female patient lies on the bed.", "type": "attribute", "weight": 3 }, { "content": "The patient has various medical devices plugged into her body, and there are some monitoring instruments next to her.", "type": "attribute", "weight": 2 }, { "content": "A beige blanket with yellow patterns covers the patient's legs.", "type": "attribute", "weight": 1 }, { "content": "A brown stuffed toy is placed next to the bed.", "type": "scene", "weight": 1 }, { "content": "A middle-aged man on the left side of the screen stands in front of the curtain, looking at the patient.", "type": "action", "weight": 3 }, { "content": "There is a corner of a window on the right side of the screen.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 26 }, { "index": "TUNA_0203", "video_path": "VELOCITI/v_5lqvuMwYODI_seg_105_115.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on the interior of a room. There is a bed in the room, and a middle-aged man in a white vest is lying on his back on the bed. The man is covered with a pink blanket. The man supports his body with his arms and looks up. There is a large window in the background. There are wire mesh and some branches outside the window. The headboard is yellow. There are pink and purple pillows with white stripes on the bed, and orange, red and blue sheets. The camera moves in to the man, and the man's features become more obvious. The man has a black beard and short black hair. The man hurriedly lifts the blanket.\nNext, the camera switches to an outdoor scene. A small plane is approaching the camera. There are two children sitting in the plane. The nose of the plane is facing the camera. The fuselage is silver and the wings are yellow. On the left side of the picture is a simple dark blue arched building. There are some yellow boxes in the background. The sky is clear, and there are some tall buildings and telephone poles in the distance.\nThen, the camera moves to the left front, gradually moving towards the middle-aged man. The middle-aged man opens the door of the small gray house in a panic and runs towards the plane. The man is wearing a white vest and black pants, with obvious tattoos on both arms. The plane gradually disappears on the right side of the screen.\nFinally, the camera switches to the man's back. The man stops and looks at the plane in front of him. In the background are tall buildings and telephone poles in the city. The lower floors are in off-white and orange on the left side of the screen, while the higher floors are in silver and black on the right side of the screen.", "events": [ { "event": "At the beginning of the video, the camera is focused on the interior of a room. There is a bed in the room, and a middle-aged man in a white vest is lying on his back on the bed. The man is covered with a pink blanket. The man supports his body with his arms and looks up. There is a large window in the background. There are wire mesh and some branches outside the window. The headboard is yellow. 
There are pink and purple pillows with white stripes on the bed, and orange, red and blue sheets. The camera moves in to the man, and the man's features become more obvious. The man has a black beard and short black hair. The man hurriedly lifts the blanket.", "visual_elements": [ { "content": "The camera is pointed at the interior of a room.", "type": "camera", "weight": 3 }, { "content": "There is a bed in the room.", "type": "attribute", "weight": 3 }, { "content": "A middle-aged man in a white vest lies on his back on the bed.", "type": "attribute", "weight": 3 }, { "content": "The man is covered with a pink blanket.", "type": "attribute", "weight": 2 }, { "content": "The man supports his body with his arms and looks up.", "type": "action", "weight": 3 }, { "content": "There is a large window in the background.", "type": "scene", "weight": 3 }, { "content": "There are wire mesh and some branches outside the window.", "type": "scene", "weight": 1 }, { "content": "The headboard is yellow.", "type": "scene", "weight": 1 }, { "content": "There are pink and purple pillows with white stripes on the bed, and orange, red and blue sheets.", "type": "scene", "weight": 1 }, { "content": "The camera moves towards the man.", "type": "camera", "weight": 3 }, { "content": "The man's features are more obvious.", "type": "attribute", "weight": 3 }, { "content": "The man has a black beard and short black hair.", "type": "attribute", "weight": 2 }, { "content": "The man hurriedly lifts the blanket.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to an outdoor scene. A small plane is approaching the camera. There are two children sitting in the plane. The nose of the plane is facing the camera. The fuselage is silver and the wings are yellow. On the left side of the picture is a simple dark blue arched building. There are some yellow boxes in the background. The sky is clear, and there are some tall buildings and telephone poles in the distance.", "visual_elements": [ { "content": "Next, the camera switches to an outdoor scene.", "type": "camera", "weight": 3 }, { "content": "A small plane is approaching the camera.", "type": "action", "weight": 3 }, { "content": "There are two children in the plane.", "type": "attribute", "weight": 2 }, { "content": "The nose of the plane is facing the camera, the fuselage is silver and the wings are yellow.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the picture is a simple dark blue arched building.", "type": "scene", "weight": 2 }, { "content": "There are some yellow boxes in the background.", "type": "scene", "weight": 2 }, { "content": "The sky is clear, and there are some tall buildings and telephone poles in the distance.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves to the left front, gradually moving towards the middle-aged man. The middle-aged man opens the door of the small gray house in a panic and runs towards the plane. The man is wearing a white vest and black pants, with obvious tattoos on both arms. 
The plane gradually disappears on the right side of the screen.", "visual_elements": [ { "content": "Then, the camera moves to the left front.", "type": "camera", "weight": 3 }, { "content": "The camera gradually moves towards the middle-aged man.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man opens the door of the small gray house in a panic and runs towards the plane.", "type": "action", "weight": 3 }, { "content": "The man is wearing black pants and has obvious tattoos on both arms.", "type": "attribute", "weight": 2 }, { "content": "The plane gradually disappears on the right side of the screen.", "type": "attribute", "weight": 1 } ] }, { "event": "Finally, the camera switches to the man's back. The man stops and looks at the plane in front of him. In the background are tall buildings and telephone poles in the city. The lower floors are in off-white and orange on the left side of the screen, while the higher floors are in silver and black on the right side of the screen.", "visual_elements": [ { "content": "Finally, the camera switches to the man's back.", "type": "camera", "weight": 3 }, { "content": "The man stops and looks at the plane in front of him.", "type": "action", "weight": 3 }, { "content": "There are high-rise buildings and telephone poles in the city in the background.", "type": "scene", "weight": 3 }, { "content": "The lower floors are in off-white and orange on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "The higher floors are in silver and black on the right side of the screen.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 30 }, { "index": "TUNA_0204", "video_path": "VELOCITI/v_5lqvuMwYODI_seg_35_45.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a little white boy sitting on a bed. The boy has short brown curly hair, wears a white shirt, and holds a green glow stick. There is part of another person on the right side of the screen. The person on the right side of the screen is wearing a red top and dark jeans. The person on the right side of the screen also holds a green glow stick in his left hand. The person on the right side of the screen stands opposite the boy. The background is dark, and the headboard is wooden. The bed sheet is white with stripes. The pillow is blue.\nThen, the camera switches to a laboratory full of various equipment. The little boy is wearing a white shirt and a striped vest, holding a long green light stick in his left hand. The little boy is followed by a white girl wearing a black coat and a red inner layer, holding a short green light stick in her right hand. The girl's long hair falls to her shoulders. The two people walk from the right side of the screen to the center of the screen, bending over. The camera follows the two people's movements. There are many electronic devices and cables in the room. The equipment in the room is mainly silver. There is a monitor in the background showing a blue image.\nNext, the camera focuses on an open drawer, which contains a red gas cylinder and a blue device.\nFinally, the camera switches again. In the foreground is a middle-aged white man lying on a bed. The middle-aged man is wearing a white vest and has tattoos on his arms. The little boy gradually approaches the middle-aged man lying on the bed. 
In the background, the little girl runs from the right side of the screen to a device on the left side of the screen.", "events": [ { "event": "The video begins with a little white boy sitting on a bed. The boy has short brown curly hair, wears a white shirt, and holds a green glow stick. There is part of another person on the right side of the screen. The person on the right side of the screen is wearing a red top and dark jeans. The person on the right side of the screen also holds a green glow stick in his left hand. The person on the right side of the screen stands opposite the boy. The background is dark, and the headboard is wooden. The bed sheet is white with stripes. The pillow is blue.", "visual_elements": [ { "content": "A little white boy is half-lying on the bed.", "type": "attribute", "weight": 3 }, { "content": "The boy has short brown curly hair, wears a white shirt, and holds a green glow stick in his hand.", "type": "attribute", "weight": 3 }, { "content": "There is part of another person's body on the right side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The person on the right side of the screen wears a red top and dark jeans.", "type": "attribute", "weight": 1 }, { "content": "The person on the right side of the screen also holds a green glow stick in his left hand.", "type": "attribute", "weight": 1 }, { "content": "The person on the right side of the screen stands opposite the boy.", "type": "attribute", "weight": 1 }, { "content": "The background is dark.", "type": "scene", "weight": 3 }, { "content": "The headboard is wooden.", "type": "scene", "weight": 1 }, { "content": "The bed sheet is white with stripes.", "type": "scene", "weight": 1 }, { "content": "The pillow is blue.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to a laboratory full of various equipment. The little boy is wearing a white shirt and a striped vest, holding a long green light stick in his left hand. The little boy is followed by a white girl wearing a black coat and a red inner layer, holding a short green light stick in her right hand. The girl's long hair falls to her shoulders. The two people walk from the right side of the screen to the center of the screen, bending over. The camera follows the two people's movements. There are many electronic devices and cables in the room. The equipment in the room is mainly silver. 
There is a monitor in the background showing a blue image.", "visual_elements": [ { "content": "Then, the camera switches to a laboratory full of various equipment.", "type": "camera", "weight": 3 }, { "content": "The little boy wears a white shirt and a striped vest, holding a long green glow stick in his left hand.", "type": "attribute", "weight": 3 }, { "content": "The little boy is followed by a white girl wearing a black coat and a red inner layer, holding a short green glow stick in her right hand.", "type": "attribute", "weight": 3 }, { "content": "The girl's long hair is draped over her shoulders.", "type": "attribute", "weight": 1 }, { "content": "The two people walk from the right side of the screen to the center of the screen, bending over.", "type": "action", "weight": 3 }, { "content": "The camera follows the two people's movements.", "type": "camera", "weight": 3 }, { "content": "There are many electronic devices and cables in the room.", "type": "scene", "weight": 3 }, { "content": "The equipment in the room is mainly silver.", "type": "scene", "weight": 3 }, { "content": "There is a monitor in the background showing a blue image.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera focuses on an open drawer, which contains a red gas cylinder and a blue device.", "visual_elements": [ { "content": "Next, the camera focuses on an open drawer.", "type": "camera", "weight": 3 }, { "content": "Inside the drawer are red gas cylinders and a blue device.", "type": "attribute", "weight": 3 } ] }, { "event": "Finally, the camera switches again. In the foreground is a middle-aged white man lying on a bed. The middle-aged man is wearing a white vest and has tattoos on his arms. The little boy gradually approaches the middle-aged man lying on the bed. In the background, the little girl runs from the right side of the screen to a device on the left side of the screen.", "visual_elements": [ { "content": "Finally, the camera switches again.", "type": "camera", "weight": 3 }, { "content": "In the foreground is a middle-aged white man lying on a bed.", "type": "attribute", "weight": 3 }, { "content": "The middle-aged man is wearing a white vest and has tattoos on his arms.", "type": "attribute", "weight": 3 }, { "content": "The little boy gradually approaches the middle-aged man lying on the bed.", "type": "attribute", "weight": 2 }, { "content": "In the background, the little girl runs from the right side of the screen to a device on the left side of the screen.", "type": "scene", "weight": 3 } ] } ], "n_events": 4, "n_elements": 26 }, { "index": "TUNA_0205", "video_path": "VELOCITI/v_5otacrrli04_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a young white man wearing a white short-sleeved shirt. The man stands next to a car and talks to the people in the car while shaking his body. The camera switches from different angles to show the man's front and side. The scene is at night. The background is the doorway of a house, which is made of brown wood. On the left side of the picture, there is a light next to the door.\nThen, the camera switches to the inside of the car. Another young man wearing a black and white striped vest and a white short-sleeved shirt sits in the driver's seat. The man wears a silver watch on his left hand, which is placed on the steering wheel.
The man in the striped vest slaps the steering wheel angrily, and then leans against the backrest of the car. The light inside the car is dim.\nThen, the camera switches back to the outside of the car. The man in the white short-sleeved shirt puts his right hand on the roof of the car, pats it after speaking, and jogs to the left of the screen. The camera follows the man in the white short-sleeved shirt and pulls back. The rear window of the car appears in the screen. The car is white. The man in the striped vest on the left side of the screen gets out of the car and looks in the direction where the man in the white short-sleeved shirt is jogging.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young white man wearing a white short-sleeved shirt. The man stands next to a car and talks to the people in the car while shaking his body. The camera switches from different angles to show the man's front and side. The scene is at night. The background is the doorway of a house, which is made of brown wood. On the left side of the picture, there is a light next to the door.", "visual_elements": [ { "content": "The camera is focused on a young white man in a white short-sleeved shirt.", "type": "camera", "weight": 3 }, { "content": "The man stands next to a car and talks to the people in the car while shaking his body.", "type": "action", "weight": 3 }, { "content": "The scene is at the door of a house at night.", "type": "scene", "weight": 3 }, { "content": "The door of the house is brown and wooden.", "type": "scene", "weight": 1 }, { "content": "On the left side of the screen, there is a light on next to the door.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to the inside of the car. Another young man wearing a black and white striped vest and a white short-sleeved shirt sits in the driver's seat. The man wears a silver watch on his left hand, which is placed on the steering wheel. The man in the striped vest slaps the steering wheel angrily, and then leans against the backrest of the car. The light inside the car is dim.", "visual_elements": [ { "content": "Then, the camera switches to the inside of the car.", "type": "camera", "weight": 3 }, { "content": "Another young man wearing a black and white striped vest and a white short-sleeved shirt is sitting in the driver's seat.", "type": "attribute", "weight": 3 }, { "content": "The man wears a silver watch on his left hand, which is placed on the steering wheel.", "type": "attribute", "weight": 2 }, { "content": "The man in the striped vest slaps the steering wheel angrily, then leans on the backrest of the car.", "type": "action", "weight": 3 }, { "content": "The light inside the car is dim.", "type": "scene", "weight": 3 } ] }, { "event": "Then, the camera switches back to the outside of the car. The man in the white short-sleeved shirt puts his right hand on the roof of the car, pats it after speaking, and jogs to the left of the screen. The camera follows the man in the white short-sleeved shirt and pulls back. The rear window of the car appears in the screen. The car is white.
The man in the striped vest on the left side of the screen gets out of the car and looks in the direction where the man in the white short-sleeved shirt is jogging.", "visual_elements": [ { "content": "Then, the camera switches back to the outside of the car again.", "type": "camera", "weight": 3 }, { "content": "The man in the white short-sleeved shirt puts his right hand on the roof of the car, pats it after speaking, and jogs to the left side of the screen.", "type": "action", "weight": 3 }, { "content": "The camera follows the movement of the man in the white short-sleeved shirt and pulls back.", "type": "camera", "weight": 3 }, { "content": "The rear window of the car appears in the screen.", "type": "attribute", "weight": 2 }, { "content": "The car is white.", "type": "attribute", "weight": 1 }, { "content": "The man in the striped vest on the left side of the screen gets out of the car and looks in the direction where the man in the white short-sleeved shirt is jogging.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0206", "video_path": "VELOCITI/v_5otacrrli04_seg_120_130.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a woman in a blue nightgown stands in the middle of the room. The woman is facing the camera sideways and tidying up her messy hair. The scene is in a dimly lit room. To the right of the woman is a brown wooden door. There is a painting hanging on the wall on the right side of the screen. There is a small table under the painting with a few candles on it.\nThen, the camera switches to an outdoor night scene. The camera keeps moving backwards. A young white man in a white short-sleeved shirt gradually appears in the picture. The man stands in the dim light outside the building. The man holds a bunch of keys in his right hand. Then, two people fighting appear behind the man. The background is a corridor with some blurred light sources at the end of the corridor.\nThen, the camera switches to a slightly farther perspective. The man in the white short-sleeved shirt stands on the right side of the screen. A middle-aged man in a suit is pushed out of the door by a middle-aged man in a white hat behind him and falls to the ground. The man in the white hat wears a white shirt and blue jeans. The light in the background room is yellow. There is another figure in the door.\nFinally, the camera switches to a close-up. The camera focuses on two men who are fighting. The middle-aged man in the white hat walks out of the door, bends down and continues to beat the man in the suit on the ground. The man in the suit looks to be in pain and is unable to fight back.", "events": [ { "event": "At the beginning of the video, a woman in a blue nightgown stands in the middle of the room. The woman is facing the camera sideways and tidying up her messy hair. The scene is in a dimly lit room. To the right of the woman is a brown wooden door. There is a painting hanging on the wall on the right side of the screen.
There is a small table under the painting with a few candles on it.", "visual_elements": [ { "content": "A woman in a blue nightgown stands in the middle of the room.", "type": "attribute", "weight": 3 }, { "content": "The woman is facing the camera sideways and tidying up her messy hair.", "type": "action", "weight": 3 }, { "content": "The scene is in a dimly lit room.", "type": "scene", "weight": 3 }, { "content": "There is a brown wooden door to the right of the woman.", "type": "scene", "weight": 2 }, { "content": "There is a painting on the wall on the right side of the picture.", "type": "scene", "weight": 1 }, { "content": "There is a small table under the painting.", "type": "scene", "weight": 1 }, { "content": "There are a few candles on the table.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to an outdoor night scene. The camera keeps moving backwards. A young white man in a white short-sleeved shirt gradually appears in the picture. The man stands in the dim light outside the building. The man holds a bunch of keys in his right hand. Then, two people fighting appear behind the man. The background is a corridor with some blurred light sources at the end of the corridor.", "visual_elements": [ { "content": "Then, the camera switches to an outdoor night scene.", "type": "camera", "weight": 3 }, { "content": "The camera keeps moving backwards.", "type": "camera", "weight": 3 }, { "content": "A young white man in a white short-sleeved shirt gradually appears in the picture.", "type": "attribute", "weight": 3 }, { "content": "The man stands in the dim light outside the building.", "type": "attribute", "weight": 2 }, { "content": "The man holds a bunch of keys in his right hand.", "type": "attribute", "weight": 2 }, { "content": "Then, two people fighting appear behind the man.", "type": "scene", "weight": 1 }, { "content": "The background is a corridor.", "type": "scene", "weight": 3 }, { "content": "There are some blurred light sources at the end of the corridor.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to a slightly farther perspective. The man in the white short-sleeved shirt stands on the right side of the screen. A middle-aged man in a suit is pushed out of the door by a middle-aged man in a white hat behind him and falls to the ground. The man in the white hat wears a white shirt and blue jeans. The light in the background room is yellow. There is another figure in the door.", "visual_elements": [ { "content": "Then, the camera switches to a slightly farther perspective.", "type": "camera", "weight": 3 }, { "content": "The man in the white short-sleeved shirt stands on the right side of the picture.", "type": "attribute", "weight": 1 }, { "content": "A middle-aged man in a suit is pushed out of the door by a middle-aged man in a white hat behind him and falls to the ground.", "type": "action", "weight": 3 }, { "content": "The man in the white hat wears a white shirt and blue jeans.", "type": "attribute", "weight": 3 }, { "content": "The light in the background room is yellow.", "type": "scene", "weight": 2 }, { "content": "There is another figure in the door.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera switches to a close-up. The camera focuses on two men who are fighting. The middle-aged man in the white hat walks out of the door, bends down and continues to beat the man in the suit on the ground. 
The man in the suit looks to be in pain and is unable to fight back.", "visual_elements": [ { "content": "Finally, the camera switches to a close-up.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on two men fighting.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man in the white hat walks out of the door, bends down and continues to hit the man in the suit on the ground.", "type": "action", "weight": 3 }, { "content": "The man in the suit looks to be in pain and is unable to fight back.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0207", "video_path": "VELOCITI/v_5otacrrli04_seg_20_30.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a white car parked next to a building. Next to the car is a young man wearing a striped white T-shirt and jeans. The man closes the car door and runs to the left of the screen. In the background, there is a row of buildings on the left side of the screen. There is a faint light in front of the building, and the surrounding environment is relatively dim.\nNext, the scene switches to another outdoor area. A man in a striped T-shirt and another young man are walking on a path at night. The other man is wearing a white short-sleeved shirt and jeans.
The two are talking while walking on the path in the direction of the camera. The man in the white short-sleeved shirt is in front and the striped man is behind. There is a stone wall in the background. There are some green plants next to the stone wall.", "visual_elements": [ { "content": "Next, the scene switches to another outdoor area.", "type": "camera", "weight": 3 }, { "content": "A man in a striped T-shirt and another young man are walking on a path at night.", "type": "action", "weight": 3 }, { "content": "The other man is wearing a white short-sleeved shirt and jeans.", "type": "attribute", "weight": 2 }, { "content": "The two are talking while walking on the path in the direction of the camera.", "type": "action", "weight": 3 }, { "content": "The man in the white short-sleeved shirt is in front, and the striped man is behind.", "type": "attribute", "weight": 3 }, { "content": "The background is a stone wall.", "type": "scene", "weight": 3 }, { "content": "There are some green plants next to the stone wall.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera pans to the right with the two people. The two people turn to the right of the screen. On the left side of the screen, there is a house connected to the stone wall. The house is lit with yellow lights.", "visual_elements": [ { "content": "Then, the camera pans to the right with the two people.", "type": "camera", "weight": 3 }, { "content": "The two turn to the right of the screen.", "type": "action", "weight": 2 }, { "content": "The stone wall on the left side of the screen is connected to a house.", "type": "scene", "weight": 2 }, { "content": "The house is lit with yellow lights.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera switches to an indoor scene, where a young woman in a black suit stands in front of a white door and flips a picture on the wall. The background is the wall and white door of the room, and the light is relatively soft.", "visual_elements": [ { "content": "Finally, the camera switches to an indoor scene.", "type": "camera", "weight": 3 }, { "content": "A young woman in a black suit stands in front of a white door and is flipping the painting on the wall.", "type": "action", "weight": 3 }, { "content": "The background is the wall and white door of the room.", "type": "scene", "weight": 3 }, { "content": "The light in the room is relatively soft.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_0208", "video_path": "VELOCITI/v_5u5ixEyjZng_seg_100_110.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a young white man and woman sitting on a sofa. The man is wearing a dark short-sleeved shirt and a silver ring on his left ring finger. The woman has long brown hair and wears black clothes. The two sit on the sofa facing each other. The man strokes the woman's hair with his right hand and holds her hand with his left hand. The background is a warm room. There are several books in front of the woman. There are several paintings on the wall. There is a white sofa under a painting on the right side of the picture. The light in the room is warm and soft.\nThe man gets up from the sofa and walks to a room on the right side of the screen. The camera switches to the woman. The woman looks down. 
The last scene shows the woman holding a white note with black words on it.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young white man and woman sitting on a sofa. The man is wearing a dark short-sleeved shirt and a silver ring on his left ring finger. The woman has long brown hair and wears black clothes. The two sit on the sofa facing each other. The man strokes the woman's hair with his right hand and holds her hand with his left hand. The background is a warm room. There are several books in front of the woman. There are several paintings on the wall. There is a white sofa under a painting on the right side of the picture. The light in the room is warm and soft.", "visual_elements": [ { "content": "The camera is focused on a young white man and woman sitting on a sofa.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a dark short-sleeved shirt and a silver ring on his left ring finger.", "type": "attribute", "weight": 3 }, { "content": "The woman has long black hair and wears black clothes.", "type": "attribute", "weight": 3 }, { "content": "The man strokes the woman's hair with his right hand and holds her hand with his left hand.", "type": "action", "weight": 3 }, { "content": "The background is a warm room.", "type": "scene", "weight": 3 }, { "content": "There are several books in front of the woman.", "type": "scene", "weight": 1 }, { "content": "There are several paintings on the wall.", "type": "scene", "weight": 2 }, { "content": "There is a white sofa under a painting on the right side of the picture.", "type": "scene", "weight": 2 }, { "content": "The light in the room is warm and soft.", "type": "scene", "weight": 3 } ] }, { "event": "The man gets up from the sofa and walks to a room on the right side of the screen. The camera switches to the woman. The woman looks down. The last scene shows the woman holding a white note with black words on it.", "visual_elements": [ { "content": "The man gets up from the sofa and walks to a room on the right side of the picture.", "type": "action", "weight": 3 }, { "content": "The camera switches to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman looks down.", "type": "action", "weight": 3 }, { "content": "The last picture shows a white note with black words in the woman's hand.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0209", "video_path": "VELOCITI/v_5u5ixEyjZng_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a young white woman sitting on a sofa. The woman has long brown hair and is wearing black clothes. The woman is talking. In the background, there is an old grandfather clock and some decorative paintings on the right side of the picture. The room is softly lit.\nThen, the camera switches to a young white man standing on the right side of the room. The man is wearing a dark blue T-shirt. The man is walking towards the camera. The camera moves slowly backwards following the man's movements. In the background, there is a wall lamp on the right side of the frame.\nFinally, the camera zooms out slightly and moves downward. The man sits to the left of the woman. There are a few books in front of the woman. 
There is a white sofa under a painting on the right side of the screen.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young white woman sitting on a sofa. The woman has long brown hair and is wearing black clothes. The woman is talking. In the background, there is an old grandfather clock and some decorative paintings on the right side of the picture. The room is softly lit.", "visual_elements": [ { "content": "The camera is focused on a young white woman sitting on a sofa.", "type": "camera", "weight": 3 }, { "content": "The woman has long brown hair and wears black clothes.", "type": "attribute", "weight": 3 }, { "content": "The woman is talking.", "type": "action", "weight": 3 }, { "content": "In the background, there is an old grandfather clock and some decorative paintings on the right side of the screen.", "type": "scene", "weight": 1 }, { "content": "The room is softly lit.", "type": "scene", "weight": 3 } ] }, { "event": "Then, the camera switches to a young white man standing on the right side of the room. The man is wearing a dark blue T-shirt. The man is walking towards the camera. The camera moves slowly backwards following the man's movements. In the background, there is a wall lamp on the right side of the frame.", "visual_elements": [ { "content": "Then, the camera switches to a young white man standing on the right side of the room.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a dark blue T-shirt.", "type": "attribute", "weight": 3 }, { "content": "The man is walking towards the camera.", "type": "action", "weight": 3 }, { "content": "The camera moves slowly backwards following the man's movements.", "type": "camera", "weight": 3 }, { "content": "In the background, there is a wall lamp on the right side of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera zooms out slightly and moves downward. The man sits to the left of the woman. There are a few books in front of the woman. There is a white sofa under a painting on the right side of the screen.", "visual_elements": [ { "content": "Finally, the camera moves downward.", "type": "camera", "weight": 3 }, { "content": "The camera is slightly farther away.", "type": "camera", "weight": 2 }, { "content": "The man sits on the left side of the woman.", "type": "action", "weight": 3 }, { "content": "There are a few books in front of the woman.", "type": "scene", "weight": 1 }, { "content": "A white sofa is placed under a painting on the right side of the screen.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0210", "video_path": "VELOCITI/v_6FtMdJEg2ak_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera pointing to a tilted open window. Inside the window is an older white man wearing black clothes and with his white hair tied back. The background is dark. The window is designed with multiple small wooden panes. There are several chandeliers and table lamps on the ceiling in the room, emitting a soft yellow light.\nThen, the camera moves to the upper right. The camera gradually moves towards the older man in the window. The older man has a black watch on his left hand. The older man crosses his arms and looks out the window while talking. 
A middle-aged white man wearing a dark blue shirt appears from the right side of the screen behind the older man. The middle-aged man puts his left hand on his waist and talks to the person in front of him.\nThen, the camera switches to the interior. The older man at the window turns to face the middle-aged man and walks to the left of the screen. The middle-aged man walks to the table on the left side of the screen, picks up the phone from the table and hands it to the older man. The furnishings in the room are simple but stylish. There are some animal specimens and decorative paintings hanging on the wall. There are some documents and a desk lamp on the table on the left side of the screen.", "events": [ { "event": "The video begins with the camera pointing to a tilted open window. Inside the window is an older white man wearing black clothes and with his white hair tied back. The background is dark. The window is designed with multiple small wooden panes. There are several chandeliers and table lamps on the ceiling in the room, emitting a soft yellow light.", "visual_elements": [ { "content": "The camera is pointed at a tilted open window.", "type": "camera", "weight": 3 }, { "content": "In the window there is an older white man wearing black clothes and with white hair tied back.", "type": "attribute", "weight": 3 }, { "content": "The background is dark.", "type": "scene", "weight": 3 }, { "content": "The design of the window is composed of multiple small wooden panes.", "type": "scene", "weight": 3 }, { "content": "There are several chandeliers and table lamps on the ceiling of the room, emitting a soft yellow light.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera moves to the upper right. The camera gradually moves towards the older man in the window. The older man has a black watch on his left hand. The older man crosses his arms and looks out the window while talking. A middle-aged white man wearing a dark blue shirt appears from the right side of the screen behind the older man. The middle-aged man puts his left hand on his waist and talks to the person in front of him.", "visual_elements": [ { "content": "Then, the camera moves to the upper right.", "type": "camera", "weight": 3 }, { "content": "The camera gradually moves towards the older man in the window.", "type": "camera", "weight": 3 }, { "content": "The older man has a black watch on his left hand.", "type": "attribute", "weight": 1 }, { "content": "The older man crosses his arms and looks out the window and talks.", "type": "action", "weight": 3 }, { "content": "A middle-aged white man in a dark blue shirt appears from the right side of the screen behind the older man.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged man speaks to the person in front of him with his left hand on his waist.", "type": "action", "weight": 2 } ] }, { "event": "Then, the camera switches to the interior. The older man at the window turns to face the middle-aged man and walks to the left of the screen. The middle-aged man walks to the table on the left side of the screen, picks up the phone from the table and hands it to the older man. The furnishings in the room are simple but stylish. There are some animal specimens and decorative paintings hanging on the wall. 
There are some documents and a desk lamp on the table on the left side of the screen.", "visual_elements": [ { "content": "Then, the camera switches to the room.", "type": "camera", "weight": 3 }, { "content": "The older man by the window turns around to face the middle-aged man and walks to the left side of the screen.", "type": "action", "weight": 3 }, { "content": "The middle-aged man walks to the table on the left side of the screen.", "type": "action", "weight": 3 }, { "content": "Then, the middle-aged man picks up the phone from the table and hands it to the older man.", "type": "action", "weight": 3 }, { "content": "The furnishings in the room are simple but stylish.", "type": "scene", "weight": 3 }, { "content": "There are some animal specimens and decorative paintings hanging on the wall.", "type": "scene", "weight": 1 }, { "content": "There are some documents and table lamps on the table on the left side of the screen.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0211", "video_path": "VELOCITI/v_6J-PhFYNbOU_seg_25_35.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera shows a camping scene at night. There are three people in the picture. In the foreground of the camera, there is a young white man wearing khaki trousers, a white shirt and a belt around his waist. The man holds a long gun in his hand, his head facing the right side of the picture, and lies on a white quilt on the ground. There is another man behind the young man. The man lies on a simple bed with his head facing the left side of the picture. Above the bed is a piece of red and blue striped cloth supported by wooden sticks. The third person is a woman wearing a black long skirt and holding a box in her hand. The woman walks from the right side of the picture to the front of the man lying on the ground. In the background, there is a light-colored tent on the right side of the picture. There are several people lying under the tent, as well as some scattered items. The camp is illuminated by dim lights.\nThen, the woman bends down and puts the box in front of the man lying on the ground. The camera switches to the woman's face. Then the woman immediately leaves from the left side of the screen.\nFinally, the camera is focused on the man lying on the ground. The man is half lying on a dark pillow. The man mutters something, then opens his eyes and looks in the direction the woman left.", "events": [ { "event": "At the beginning of the video, the camera shows a camping scene at night. There are three people in the picture. In the foreground of the camera, there is a young white man wearing khaki trousers, a white shirt and a belt around his waist. The man holds a long gun in his hand, his head facing the right side of the picture, and lies on a white quilt on the ground. There is another man behind the young man. The man lies on a simple bed with his head facing the left side of the picture. Above the bed is a piece of red and blue striped cloth supported by wooden sticks. The third person is a woman wearing a black long skirt and holding a box in her hand. The woman walks from the right side of the picture to the front of the man lying on the ground. In the background, there is a light-colored tent on the right side of the picture. 
There are several people lying under the tent, as well as some scattered items. The camp is illuminated by dim lights.", "visual_elements": [ { "content": "The camera shows a camping scene at night.", "type": "camera", "weight": 3 }, { "content": "There are three figures in the picture.", "type": "attribute", "weight": 3 }, { "content": "In the foreground of the camera, there is a young white man wearing khaki trousers and a white shirt with a belt around his waist.", "type": "attribute", "weight": 3 }, { "content": "The man holds a long gun in his hand, his head is facing the right side of the picture, and he lies on a white quilt on the ground.", "type": "attribute", "weight": 3 }, { "content": "There is another man behind the young man.", "type": "attribute", "weight": 2 }, { "content": "The man behind the young man lies on a simple bed with his head facing the left side of the picture.", "type": "attribute", "weight": 2 }, { "content": "Above the bed is a piece of red and blue striped cloth supported by wooden sticks.", "type": "scene", "weight": 2 }, { "content": "The third figure is a woman in a black long skirt.", "type": "attribute", "weight": 3 }, { "content": "The woman holds a box in both hands.", "type": "attribute", "weight": 3 }, { "content": "The woman walks from the right side of the picture to the front of the man lying on the ground.", "type": "action", "weight": 3 }, { "content": "In the background, there is a light-colored tent on the right side of the picture.", "type": "scene", "weight": 2 }, { "content": "Several people are lying under the tent, and there are some scattered items.", "type": "scene", "weight": 1 }, { "content": "The camp is illuminated by dim lights.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the woman bends down and puts the box in front of the man lying on the ground. The camera switches to the woman's face. Then the woman immediately leaves from the left side of the screen.", "visual_elements": [ { "content": "Then, the woman bends down and puts the box in front of the man lying on the ground.", "type": "action", "weight": 3 }, { "content": "The camera switches to the woman's face.", "type": "camera", "weight": 3 }, { "content": "Then the woman immediately leaves from the left side of the picture.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera is focused on the man lying on the ground. The man is half lying on a dark pillow. The man mutters something, then opens his eyes and looks in the direction the woman left.", "visual_elements": [ { "content": "Finally, the camera focuses on the man lying on the ground.", "type": "camera", "weight": 3 }, { "content": "The man is half lying on a dark pillow.", "type": "attribute", "weight": 3 }, { "content": "The man mutters something in his mouth, then opens his eyes and looks in the direction where the woman left.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0212", "video_path": "VELOCITI/v_6JSqeViZRU0_seg_15_25.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a character wearing black and gray striped clothes and a top hat. The character has a hideous face, golden hair and long nails. The character stretches his hands forward and looks obscene. In the foreground of the camera, there is a pair of hands hanging naturally. 
The background is a wooden wall and a light purple wall.\nNext, the camera switches to an indoor scene. The camera moves upwards as the character moves. On the right side of the screen, a woman is covered with a pink towel and lying on her side on a pink bed. The woman's hair is curly and her back is black. The character's hands are close to the woman's back. The character first blows air in front of his mouth, rubs his hands, and then presses them against the woman's back. The character has a sly expression and a hint of evil in his eyes. In the background, there is a painting hanging on the white wall. On the right side of the screen, there are two layers of white shelves hanging on the wall, on which are neatly folded towels and other items.", "events": [ { "event": "At the beginning of the video, the camera is focused on a character wearing black and gray striped clothes and a top hat. The character has a hideous face, golden hair and long nails. The character stretches his hands forward and looks obscene. In the foreground of the camera, there is a pair of hands hanging naturally. The background is a wooden wall and a light purple wall.", "visual_elements": [ { "content": "The camera is focused on a character wearing black and grey striped clothes and a top hat.", "type": "camera", "weight": 3 }, { "content": "The character has a hideous face, golden hair and long nails.", "type": "attribute", "weight": 3 }, { "content": "The character has his hands stretched forward with a lewd look.", "type": "action", "weight": 3 }, { "content": "There are two hands hanging naturally in the foreground of the camera.", "type": "attribute", "weight": 2 }, { "content": "The background is a wooden wall and a light purple wall.", "type": "scene", "weight": 3 } ] }, { "event": "Next, the camera switches to an indoor scene. The camera moves upwards as the character moves. On the right side of the screen, a woman is covered with a pink towel and lying on her side on a pink bed. The woman's hair is curly and her back is black. The character's hands are close to the woman's back. The character first blows air in front of his mouth, rubs his hands, and then presses them against the woman's back. The character has a sly expression and a hint of evil in his eyes. In the background, there is a painting hanging on the white wall. 
On the right side of the screen, there are two layers of white shelves hanging on the wall, on which are neatly folded towels and other items.", "visual_elements": [ { "content": "Next, the camera switches to an indoor scene.", "type": "scene", "weight": 3 }, { "content": "The camera moves upwards with the character's movements.", "type": "camera", "weight": 3 }, { "content": "On the right side of the screen, there is a woman covered with a pink towel, lying on her side on a pink bed.", "type": "attribute", "weight": 2 }, { "content": "The woman has curly hair and a black back.", "type": "attribute", "weight": 1 }, { "content": "The character's hand is close to the woman's back.", "type": "action", "weight": 2 }, { "content": "The character first blows air in front of his mouth with both hands, rubs his hands and then presses them against the woman's back.", "type": "action", "weight": 3 }, { "content": "The character has a sly expression and a hint of evil smile in his eyes.", "type": "attribute", "weight": 2 }, { "content": "In the background, there is a painting hanging on the white wall.", "type": "scene", "weight": 2 }, { "content": "Two layers of white shelves are hung on the wall on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "The shelves are filled with neatly folded towels and other items.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0213", "video_path": "VELOCITI/v_6YbTy5AvRP4_seg_80_90.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on an elderly white man in a police uniform. The man is wearing a light gray police uniform with a police badge on his left chest. The background is a dilapidated house with mottled walls and broken windows.\nThe camera follows the man's movements. The policeman enters the frame from the left and moves slowly to the right, looking around. The interior of the wooden house is very simple.\nThe camera follows the policeman and stops in front of a cabinet. The cabinet on the right corner of the screen is very shabby. The policeman walks to the cabinet, stops, and then opens the cabinet viciously.\nFinally, the camera pans left with the policeman, who stays in front of the cabinet for a moment and then turns to look to the left side of the screen.", "events": [ { "event": "At the beginning of the video, the camera is focused on an elderly white man in a police uniform. The man is wearing a light gray police uniform with a police badge on his left chest. The background is a dilapidated house with mottled walls and broken windows.", "visual_elements": [ { "content": "The camera is focused on an older white man in a police uniform.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a light grey police uniform with a police badge on his left chest.", "type": "attribute", "weight": 3 }, { "content": "The background is a dilapidated house.", "type": "scene", "weight": 3 }, { "content": "The walls in the house are mottled and the windows are broken.", "type": "scene", "weight": 3 } ] }, { "event": "The camera follows the man's movements. The policeman enters the frame from the left and moves slowly to the right, looking around. 
The interior of the wooden house is very simple.", "visual_elements": [ { "content": "The camera follows the man's movements.", "type": "camera", "weight": 3 }, { "content": "The policeman enters the picture from the left and moves slowly to the right, looking around.", "type": "action", "weight": 3 }, { "content": "The interior of the wooden house is very simple.", "type": "scene", "weight": 3 } ] }, { "event": "The camera follows the policeman and stops in front of a cabinet. The cabinet on the right corner of the screen is very shabby. The policeman walks to the cabinet, stops, and then opens the cabinet viciously.", "visual_elements": [ { "content": "The camera follows the policeman to a cabinet.", "type": "camera", "weight": 3 }, { "content": "The policeman walks to the cabinet and stops, then opens the cabinet viciously.", "type": "action", "weight": 3 }, { "content": "The cabinet in the right corner of the picture is very dilapidated.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera pans left with the policeman, who stays in front of the cabinet for a moment and then turns to look to the left side of the screen.", "visual_elements": [ { "content": "Finally, the camera follows the policeman to turn left.", "type": "camera", "weight": 3 }, { "content": "After the policeman stops in front of the cabinet for a moment, he turns and looks to the left of the picture.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 12 }, { "index": "TUNA_0214", "video_path": "VELOCITI/v_6a0at61sWkE_seg_15_25.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a dense forest. There are two male characters in the forest. The camera quickly moves to the right, the trees in the forest are slightly sparse, and the features of two middle-aged Asian men are revealed. The man on the right side of the screen is wearing a black long-sleeved shirt with white patterns and dark pants. The man on the left side of the screen is wearing a blue jacket and brown pants. The man on the right side of the screen is hitting the head of the man on the left side of the screen with both hands. The man on the left side of the screen protects his head with both hands.\nNext, the scene switches to a battery car driving on a small road. There are five middle-aged Asians in the car. The one on the far right of the picture is a woman. The person in the driver's seat is blocked, and the remaining four people turn their heads to look back. The four people are all wearing white uniforms. The three people on the right side of the picture are wearing hats of different colors. The man in the foreground of the camera is wearing a gray hat, the woman on the far right of the picture is wearing a pink hat, and the man in the passenger seat is wearing a blue hat, a dark blue vest, and black-framed glasses. The person on the left side of the picture is wearing white transparent glasses. There are green trees and grass outside the transparent window of the car.\nFinally, the camera switches to a winding path. The path is surrounded by green grass and tall trees. A man in dark clothes runs out of the woods on the right side of the screen. The man holds a stick in his hand. The camera gradually zooms out. 
After standing on the path for a few seconds, the man runs along the path to the front of the screen.", "events": [ { "event": "At the beginning of the video, the camera focuses on a dense forest. There are two male characters in the forest. The camera quickly moves to the right, the trees in the forest are slightly sparse, and the features of two middle-aged Asian men are revealed. The man on the right side of the screen is wearing a black long-sleeved shirt with white patterns and dark pants. The man on the left side of the screen is wearing a blue jacket and brown pants. The man on the right side of the screen is hitting the head of the man on the left side of the screen with both hands. The man on the left side of the screen protects his head with both hands.", "visual_elements": [ { "content": "The camera focuses on a dense forest.", "type": "camera", "weight": 3 }, { "content": "There are two male characters in the forest.", "type": "attribute", "weight": 3 }, { "content": "The camera quickly moves to the right, and the trees in the forest are slightly sparse.", "type": "camera", "weight": 3 }, { "content": "The features of two middle-aged Asian men are revealed.", "type": "attribute", "weight": 3 }, { "content": "The man on the right side of the screen is wearing a black long-sleeved shirt with white patterns and dark pants.", "type": "attribute", "weight": 3 }, { "content": "The man on the left side of the screen is wearing a blue jacket and brown pants.", "type": "attribute", "weight": 3 }, { "content": "The man on the right side of the screen is hitting the man on the left side of the screen with both hands.", "type": "action", "weight": 3 }, { "content": "The man on the left side of the screen protects his head with both hands.", "type": "action", "weight": 2 } ] }, { "event": "Next, the scene switches to a battery car driving on a small road. There are five middle-aged Asians in the car. The one on the far right of the picture is a woman. The person in the driver's seat is blocked, and the remaining four people turn their heads to look back. The four people are all wearing white uniforms. The three people on the right side of the picture are wearing hats of different colors. The man in the foreground of the camera is wearing a gray hat, the woman on the far right of the picture is wearing a pink hat, and the man in the passenger seat is wearing a blue hat, a dark blue vest, and black-framed glasses. The person on the left side of the picture is wearing white transparent glasses. 
There are green trees and grass outside the transparent window of the car.", "visual_elements": [ { "content": "Next, the scene switches to an electric car driving on a small road.", "type": "camera", "weight": 3 }, { "content": "There are five middle-aged Asians in the car.", "type": "attribute", "weight": 3 }, { "content": "The one on the far right of the screen is a woman.", "type": "attribute", "weight": 1 }, { "content": "The person in the driver's seat is blocked, and the remaining four people turn their heads to look back.", "type": "action", "weight": 3 }, { "content": "All four people are wearing white uniforms.", "type": "attribute", "weight": 3 }, { "content": "The three people on the right side of the screen are wearing hats of different colors.", "type": "attribute", "weight": 2 }, { "content": "The man in the foreground of the camera is wearing a gray hat, and the woman on the far right of the screen is wearing a pink hat.", "type": "attribute", "weight": 1 }, { "content": "The man in the passenger seat is wearing a blue hat, a dark blue vest, and black-framed glasses.", "type": "attribute", "weight": 1 }, { "content": "The person on the left side of the screen is wearing white transparent glasses.", "type": "attribute", "weight": 1 }, { "content": "There are green trees and grass outside the transparent window of the car.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera switches to a winding path. The path is surrounded by green grass and tall trees. A man in dark clothes runs out of the woods on the right side of the screen. The man holds a stick in his hand. The camera gradually zooms out. After standing on the path for a few seconds, the man runs along the path to the front of the screen.", "visual_elements": [ { "content": "Finally, the camera switches to a winding road.", "type": "camera", "weight": 3 }, { "content": "The road is surrounded by green grass and tall trees.", "type": "scene", "weight": 3 }, { "content": "The man in dark clothes runs out of the woods on the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The man is holding a stick.", "type": "attribute", "weight": 2 }, { "content": "The camera gradually zooms out.", "type": "camera", "weight": 3 }, { "content": "The man stands on the road for a few seconds, then runs along the road to the front of the screen.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 24 }, { "index": "TUNA_0215", "video_path": "VELOCITI/v_6cwIlJvjP6k_seg_85_95.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on three people standing in an indoor scene. On the left side of the screen, half of the face of a young man is in the frame. In the middle of the frame is a middle-aged white man wearing black clothes. On the right side of the frame is a young white woman with blond hair. All three are looking down. The woman on the right side of the frame turns her head to the right side of the frame. There is a geometric steel frame structure on the ceiling in the background.\nNext, the camera switches to a bird's-eye view. A short-haired man is lying face down on the ground full of water. The short-haired man is wearing black pants and a blue shirt, with a large red bloodstain around his head. 
Water is gushing out from both sides.\nThen, the camera switches to the three-person scene again. The woman on the right side of the screen has turned her head. The middle-aged man in the middle of the screen raises his eyes slightly.\nThen, the camera switches to a high-angle view. The screen shows the interior of a multi-story building. Many people in the screen are lying on transparent railings and looking down. There are many round columns inside the building.\nFinally, the camera switches to a bird's-eye view again. The man on the ground is lying in a circle of fountains. Two people come out from the right side of the screen and jump into the water and run towards the man lying on the ground. There are many spectators around.", "events": [ { "event": "The video begins with the camera focusing on three people standing in an indoor scene. On the left side of the screen, half of the face of a young man is in the frame. In the middle of the frame is a middle-aged white man wearing black clothes. On the right side of the frame is a young white woman with blond hair. All three are looking down. The woman on the right side of the frame turns her head to the right side of the frame. There is a geometric steel frame structure on the ceiling in the background.", "visual_elements": [ { "content": "The camera is focused on three people standing in an indoor scene.", "type": "camera", "weight": 3 }, { "content": "Half of the face of a young man on the left side of the screen is in the picture.", "type": "attribute", "weight": 1 }, { "content": "In the middle of the picture is a middle-aged white man wearing black clothes.", "type": "attribute", "weight": 3 }, { "content": "On the right side of the screen is a young white woman with blond hair.", "type": "attribute", "weight": 2 }, { "content": "All three are looking down.", "type": "action", "weight": 3 }, { "content": "The woman on the right side of the screen turns her head to the right side of the screen.", "type": "action", "weight": 1 }, { "content": "There is a geometric steel frame structure on the ceiling in the background.", "type": "scene", "weight": 3 } ] }, { "event": "Next, the camera switches to a bird's-eye view. A short-haired man is lying face down on the ground full of water. The short-haired man is wearing black pants and a blue shirt, with a large red bloodstain around his head. Water is gushing out from both sides.", "visual_elements": [ { "content": "Next, the camera switches to a bird's-eye view.", "type": "camera", "weight": 3 }, { "content": "A short-haired man is lying headfirst on the ground full of water.", "type": "attribute", "weight": 3 }, { "content": "The short-haired man is wearing black pants and a blue top.", "type": "attribute", "weight": 2 }, { "content": "There is a large red bloodstain around the short-haired man's head.", "type": "scene", "weight": 3 }, { "content": "Water gushes out from both sides.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to the three-person scene again. The woman on the right side of the screen has turned her head. 
The middle-aged man in the middle of the screen raises his eyes slightly.", "visual_elements": [ { "content": "Then, the camera switches to a three-person scene again.", "type": "camera", "weight": 3 }, { "content": "The woman on the right side of the screen has turned her head.", "type": "action", "weight": 1 }, { "content": "The middle-aged man in the middle of the screen slightly raises his eyes.", "type": "action", "weight": 1 } ] }, { "event": "Then, the camera switches to a high-angle view. The screen shows the interior of a multi-story building. Many people in the screen are lying on transparent railings and looking down. There are many round columns inside the building.", "visual_elements": [ { "content": "Then, the camera switches to a high-altitude view.", "type": "camera", "weight": 3 }, { "content": "The picture shows the interior of a multi-story building.", "type": "scene", "weight": 3 }, { "content": "Many people in the picture are lying on transparent railings and looking down.", "type": "action", "weight": 3 }, { "content": "There are many round columns inside the building.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera switches to a bird's-eye view again. The man on the ground is lying in a circle of fountains. Two people come out from the right side of the screen and jump into the water and run towards the man lying on the ground. There are many spectators around.", "visual_elements": [ { "content": "Finally, the camera switches to a bird's-eye view again.", "type": "camera", "weight": 3 }, { "content": "The man on the ground is lying in a circle of fountains.", "type": "attribute", "weight": 3 }, { "content": "Two people come out from the right side of the screen and jump into the water and run towards the lying man.", "type": "action", "weight": 3 }, { "content": "There are many spectators around.", "type": "scene", "weight": 3 } ] } ], "n_events": 5, "n_elements": 23 }, { "index": "TUNA_0216", "video_path": "VELOCITI/v_6lRIgVCdLP4_seg_140_150.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a middle-aged white male character. The man is wearing a white short-sleeved shirt and blue ripped jeans. The man is half lying on the floor, leaning against the sofa. The man's right hand is holding the leg of the furniture in front of him, and his eyes are looking to the left of the screen. The camera is slightly zoomed out. The man quickly gets up from the ground and runs to the left side of the screen.\nNext, the camera switches to an exterior scene. The camera focuses on another middle-aged male character with short hair. The man is wearing dark clothes and holding a rifle with a scope. He is lying on the ground aiming forward. The background is a dark night.\nThen, the camera switches to the outside of a wooden hut. The camera quickly moves to the left. A third man appears on the right side of the screen. The man is wearing a plaid shirt and holding a rifle, standing at the window of the hut and aiming into the house. There is a light inside the hut. On the far left of the screen is a white boy wearing a white short-sleeved shirt and black pants.\nThen, the camera focuses on the first man in the white short-sleeved shirt again. The man in the white shirt opens the curtain and is running out of the house. 
The man in the plaid shirt points a rifle at the man in the white shirt. The man in the shirt suddenly falls to the side of the hut on the left side of the screen. The little boy keeps looking at the two men on the left side of the screen. There is a rope hanging on the hut on the left side of the screen.\nThen, the camera switches to a second middle-aged man with short hair, who is loading his gun.\nThe camera returns to the outside of the wooden hut, focusing on the shocked expression of the little boy. There is darkness behind the boy. There are several yellow lights.\nThe camera switches to a new scene, a messy warehouse. A fourth man appears on the right side of the screen. The man holding a rifle and wearing a dark suit carefully walks from the right side of the screen to the center of the screen. Several lights are on in the warehouse.\nFinally, the camera switches to the little boy again, who points his right hand to the left of the screen in shock.", "events": [ { "event": "At the beginning of the video, the camera focuses on a middle-aged white male character. The man is wearing a white short-sleeved shirt and blue ripped jeans. The man is half lying on the floor, leaning against the sofa. The man's right hand is holding the leg of the furniture in front of him, and his eyes are looking to the left of the screen. The camera is slightly zoomed out. The man quickly gets up from the ground and runs to the left side of the screen.", "visual_elements": [ { "content": "The camera focuses on a middle-aged white male character.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a white short-sleeved shirt and blue ripped jeans.", "type": "attribute", "weight": 3 }, { "content": "The man is half-lying on the floor, leaning against the sofa.", "type": "action", "weight": 3 }, { "content": "The man's right hand is holding the leg of the furniture in front of him, and his eyes are looking to the left of the screen.", "type": "action", "weight": 2 }, { "content": "The camera zooms out slightly.", "type": "camera", "weight": 3 }, { "content": "The man quickly gets up from the ground and runs to the left of the screen.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to an exterior scene. The camera focuses on another middle-aged male character with short hair. The man is wearing dark clothes and holding a rifle with a scope. He is lying on the ground aiming forward. The background is a dark night.", "visual_elements": [ { "content": "Next, the camera switches to an exterior scene.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on another middle-aged male character with short hair.", "type": "camera", "weight": 3 }, { "content": "The man is wearing dark clothes and holding a rifle with a scope.", "type": "attribute", "weight": 3 }, { "content": "The man is lying on the ground and aiming forward.", "type": "action", "weight": 2 }, { "content": "The background is a dark night.", "type": "scene", "weight": 3 } ] }, { "event": "Then, the camera switches to the outside of a wooden hut. The camera quickly moves to the left. A third man appears on the right side of the screen. The man is wearing a plaid shirt and holding a rifle, standing at the window of the hut and aiming into the house. There is a light inside the hut. 
On the far left of the screen is a white boy wearing a white short-sleeved shirt and black pants.", "visual_elements": [ { "content": "Next, the camera switches to the outside of a wooden hut.", "type": "camera", "weight": 3 }, { "content": "The camera moves quickly to the left.", "type": "camera", "weight": 3 }, { "content": "A third man appears on the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The man is wearing a plaid shirt.", "type": "attribute", "weight": 1 }, { "content": "The man in the plaid shirt stands at the window of the hut and looks into the house.", "type": "action", "weight": 3 }, { "content": "There is a white boy wearing a white short-sleeved shirt and black pants on the far left of the screen.", "type": "attribute", "weight": 2 }, { "content": "There is a light in the hut.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera focuses on the first man in the white short-sleeved shirt again. The man in the white shirt opens the curtain and is running out of the house. The man in the plaid shirt points a rifle at the man in the white shirt. The man in the shirt suddenly falls to the side of the hut on the left side of the screen. The little boy keeps looking at the two men on the left side of the screen. There is a rope hanging on the hut on the left side of the screen.", "visual_elements": [ { "content": "Then, the camera focuses on the first man in a white short-sleeved shirt again.", "type": "camera", "weight": 3 }, { "content": "The man in the white shirt opens the curtain and is running out of the house.", "type": "action", "weight": 3 }, { "content": "The man in the plaid shirt points a rifle at the man in the white shirt.", "type": "action", "weight": 3 }, { "content": "The man in the shirt suddenly falls to the side of the hut on the left side of the screen.", "type": "action", "weight": 2 }, { "content": "The little boy keeps looking at the two men on the left side of the screen.", "type": "action", "weight": 1 }, { "content": "A rope is hanging on the hut on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to a second middle-aged man with short hair, who is loading his gun.", "visual_elements": [ { "content": "Then, the camera switches to the second middle-aged man with short hair.", "type": "camera", "weight": 3 }, { "content": "The short-haired man is loading the gun.", "type": "action", "weight": 3 } ] }, { "event": "The camera returns to the outside of the wooden hut, focusing on the shocked expression of the little boy. There is darkness behind the boy. There are several yellow lights.", "visual_elements": [ { "content": "The camera returns to the outside of the wooden hut.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the shocked expression of the little boy.", "type": "camera", "weight": 3 }, { "content": "It is dark behind the boy.", "type": "scene", "weight": 3 }, { "content": "There are several yellow lights behind the boy.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to a new scene, a messy warehouse. A fourth man appears on the right side of the screen. The man holding a rifle and wearing a dark suit carefully walks from the right side of the screen to the center of the screen. 
Several lights are on in the warehouse.", "visual_elements": [ { "content": "The camera switches to a new scene.", "type": "camera", "weight": 3 }, { "content": "The background is a messy warehouse.", "type": "scene", "weight": 3 }, { "content": "There are several lights on in the warehouse.", "type": "scene", "weight": 2 }, { "content": "The fourth man appears on the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The man holding a rifle and wearing a dark suit carefully walks from the right side of the screen to the middle of the screen.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera switches to the little boy again, who points his right hand to the left of the screen in shock.", "visual_elements": [ { "content": "Finally, the camera switches to the little boy again.", "type": "camera", "weight": 3 }, { "content": "The little boy points his right hand to the left side of the screen in shock.", "type": "action", "weight": 3 } ] } ], "n_events": 8, "n_elements": 37 }, { "index": "TUNA_0217", "video_path": "VELOCITI/v_6rl0rXHWtbQ_seg_60_70.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a little white boy. The boy has short brown curly hair and wears a striped vest with a white shirt and a matching bow tie around his neck. The boy wears a black watch on his left hand and has bandages of different colors wrapped around his fingers. The boy clenches his hands. The background is a corridor decorated with arched doors and wall lights. The walls of the corridor are mainly yellow and blue. The wall on the right side of the picture has signs of peeling. There are some uniquely shaped windows on the wall.\nThen, the camera switches to a little white girl. The girl has black hair and wears a gray dress with dark stripes. The girl bends down and grabs the boy's collar. In the background, there are some unique pink decorations on the windows on the left wall of the picture.\nThen, the camera switches angles several times to show the facial expressions of the boy and girl. The boy remains relaxed, while the girl is angry. The boy is lifted high by the girl. The boy smiles and clenches his left fist. He throws a small transparent bag with his right hand and the girl catches it.\nFinally, the camera zooms in on the small bag in the girl's hand, with \"MACHETE ELECTROSHOCK GUMBALLS\" written in black in the middle of the bag.", "events": [ { "event": "At the beginning of the video, the camera is focused on a little white boy. The boy has short brown curly hair and wears a striped vest with a white shirt and a matching bow tie around his neck. The boy wears a black watch on his left hand and has bandages of different colors wrapped around his fingers. The boy clenches his hands. The background is a corridor decorated with arched doors and wall lights. The walls of the corridor are mainly yellow and blue. The wall on the right side of the picture has signs of peeling. 
There are some uniquely shaped windows on the wall.", "visual_elements": [ { "content": "The camera is focused on a little white boy.", "type": "camera", "weight": 3 }, { "content": "The boy has short brown curly hair and wears a striped vest with a white shirt and a matching bow tie.", "type": "attribute", "weight": 3 }, { "content": "The boy wears a black watch on his left hand and has bandages of different colors on his fingers.", "type": "attribute", "weight": 2 }, { "content": "The boy clenches his hands.", "type": "action", "weight": 3 }, { "content": "The background is a corridor decorated with arched doors and wall lights.", "type": "scene", "weight": 3 }, { "content": "The walls of the corridor are mainly yellow and blue.", "type": "scene", "weight": 2 }, { "content": "The wall on the right side of the screen has signs of peeling.", "type": "scene", "weight": 1 }, { "content": "There are some uniquely shaped windows on the wall.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to a little white girl. The girl has black hair and wears a gray dress with dark stripes. The girl bends down and grabs the boy's collar. In the background, there are some unique pink decorations on the windows on the left wall of the picture.", "visual_elements": [ { "content": "Then, the camera switches to a little white girl.", "type": "camera", "weight": 3 }, { "content": "The girl has black hair and wears a gray dress with dark stripes.", "type": "attribute", "weight": 3 }, { "content": "The girl bends down and grabs the boy's collar.", "type": "action", "weight": 3 }, { "content": "In the background, there are some unique pink decorations on the windows on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches angles several times to show the facial expressions of the boy and girl. The boy remains relaxed, while the girl is angry. The boy is lifted high by the girl. The boy smiles and clenches his left fist. He throws a small transparent bag with his right hand and the girl catches it.", "visual_elements": [ { "content": "The camera switches angles several times to show the facial expressions of the boy and girl.", "type": "camera", "weight": 3 }, { "content": "The boy remains relaxed, while the girl is angry.", "type": "attribute", "weight": 3 }, { "content": "The boy is lifted high by the girl.", "type": "action", "weight": 3 }, { "content": "The boy smiles and clenches his left fist. He throws a small transparent bag with his right hand and the girl catches it.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera zooms in on the small bag in the girl's hand, with \"MACHETE ELECTROSHOCK GUMBALLS\" written in black in the middle of the bag.", "visual_elements": [ { "content": "Finally, the camera zooms in on the small bag in the girl's hand.", "type": "camera", "weight": 3 }, { "content": "The black part in the middle of the bag says \"MACHETE ELECTROSHOCK GUMBALLS\".", "type": "attribute", "weight": 3 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0218", "video_path": "VELOCITI/v_7Za7WMgQqKY_seg_5_15.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is pointed at the ground, showing the legs and feet of four people. Two people are standing on each side of the screen. 
They are all wearing black suit pants and black leather shoes, standing on a smooth ground. There are some shadows on the ground, showing their outlines. There is a gray pillar in the background.\nNext, the camera switches to a close-up of two middle-aged Asian women wearing traditional black clothing. The first woman in the frame is a woman wearing black-framed glasses. Then another middle-aged woman enters the frame from the bottom and stands on the right side of the frame. The woman on the left side of the frame looks forward in shock. The woman on the right side of the frame approaches the woman in front and speaks. The background is a gray wall.", "events": [ { "event": "At the beginning of the video, the camera is pointed at the ground, showing the legs and feet of four people. Two people are standing on each side of the screen. They are all wearing black suit pants and black leather shoes, standing on a smooth ground. There are some shadows on the ground, showing their outlines. There is a gray pillar in the background.", "visual_elements": [ { "content": "The camera is pointed at the ground.", "type": "camera", "weight": 3 }, { "content": "The camera shows the legs and feet of four people.", "type": "camera", "weight": 3 }, { "content": "Two people are standing on each side of the screen.", "type": "attribute", "weight": 3 }, { "content": "They are all wearing black suit pants and black leather shoes, standing on a smooth ground.", "type": "attribute", "weight": 3 }, { "content": "There are some shadows on the ground, showing their outlines.", "type": "scene", "weight": 3 }, { "content": "There is a gray pillar in the background.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera switches to a close-up of two middle-aged Asian women wearing traditional black clothing. The first woman in the frame is a woman wearing black-framed glasses. Then another middle-aged woman enters the frame from the bottom and stands on the right side of the frame. The woman on the left side of the frame looks forward in shock. The woman on the right side of the frame approaches the woman in front and speaks. The background is a gray wall.", "visual_elements": [ { "content": "Next, the camera switches to a close-up.", "type": "camera", "weight": 3 }, { "content": "The camera shows two middle-aged Asian women wearing traditional black clothes.", "type": "camera", "weight": 3 }, { "content": "The first woman in the picture is a woman wearing black-framed glasses.", "type": "attribute", "weight": 3 }, { "content": "Then another middle-aged woman enters the picture from the bottom and stands on the right side of the picture.", "type": "attribute", "weight": 2 }, { "content": "The woman on the left side of the picture looks forward in shock.", "type": "action", "weight": 2 }, { "content": "The woman on the right side of the picture approaches the woman in front and talks.", "type": "action", "weight": 2 }, { "content": "The background is a gray wall.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0219", "video_path": "VELOCITI/v_7Zx5tpFmv9M_seg_110_120.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a young white man. The man is wearing a black T-shirt and a black watch on his left hand. 
The man walks to the right side of the screen and reaches out to take a black coat from the hanger. Then, the man turns around and quickly puts the clothes on. The camera remains in the man's side view. The background is a room with wooden decorations. The walls of the room are covered with dark brown wooden boards, and there is a dark brown wooden door with a transparent window in the background. An old-fashioned wall clock hangs on the left side of the door.\nThe camera switches to another middle-aged white woman. The woman is wearing a rose-red shirt and a light pink cardigan. The woman has short black curly hair and white pearl earrings on her ears. The woman walks out from the right side of the screen and stands by the partition wall of the room. The woman's eyes are gentle, and her fingers are pointing in circles to the sides of her eyes. There are mirrors and several decorative paintings on the background wall.\nAt the end of the video, the camera switches to a picture of two people in the same frame. The man stands on the right side of the picture, and the woman stands on the left side of the picture. The two stand opposite each other.", "events": [ { "event": "The video begins with the camera focused on a young white man. The man is wearing a black T-shirt and a black watch on his left hand. The man walks to the right side of the screen and reaches out to take a black coat from the hanger. Then, the man turns around and quickly puts the clothes on. The camera remains in the man's side view. The background is a room with wooden decorations. The walls of the room are covered with dark brown wooden boards, and there is a dark brown wooden door with a transparent window in the background. An old-fashioned wall clock hangs on the left side of the door.", "visual_elements": [ { "content": "The camera is pointed at a young white man.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a black T-shirt and a black watch on his left hand.", "type": "attribute", "weight": 3 }, { "content": "The man walks to the right side of the screen and reaches out to take a black jacket from the hanger.", "type": "action", "weight": 3 }, { "content": "Then, the man turns around and quickly puts on the clothes.", "type": "action", "weight": 3 }, { "content": "The camera remains in the man's side view.", "type": "camera", "weight": 3 }, { "content": "The background is a room with wooden decorations.", "type": "scene", "weight": 3 }, { "content": "The walls of the room are covered with dark brown wooden boards.", "type": "scene", "weight": 2 }, { "content": "There is a dark brown wooden door with a transparent window in the background.", "type": "scene", "weight": 2 }, { "content": "An old-fashioned wall clock hangs on the left side of the door.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches to another middle-aged white woman. The woman is wearing a rose-red shirt and a light pink cardigan. The woman has short black curly hair and white pearl earrings on her ears. The woman walks out from the right side of the screen and stands by the partition wall of the room. The woman's eyes are gentle, and her fingers are pointing in circles to the sides of her eyes. 
There are mirrors and several decorative paintings on the background wall.", "visual_elements": [ { "content": "The camera switches to another middle-aged white woman.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a rose-red shirt and a light pink cardigan.", "type": "attribute", "weight": 3 }, { "content": "The woman has short black curly hair and white pearl earrings on her ears.", "type": "attribute", "weight": 2 }, { "content": "The woman walks out from the right side of the screen and stands by the partition wall of the room.", "type": "action", "weight": 3 }, { "content": "The woman's eyes are gentle, and her fingers are pointing in circles to both sides of her eyes.", "type": "action", "weight": 3 }, { "content": "There are mirrors and several decorative paintings on the background wall.", "type": "scene", "weight": 3 } ] }, { "event": "At the end of the video, the camera switches to a picture of two people in the same frame. The man stands on the right side of the picture, and the woman stands on the left side of the picture. The two stand opposite each other.", "visual_elements": [ { "content": "At the end of the video, the camera switches to a picture of two people in the same frame.", "type": "camera", "weight": 3 }, { "content": "The man stands on the right side of the screen, and the woman stands on the left side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The two stand opposite each other.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0220", "video_path": "VELOCITI/v_7Zx5tpFmv9M_seg_40_50.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a middle-aged white woman. The woman is wearing a rose-red shirt and a light pink cardigan. The woman has short black curly hair and white pearl earrings on her ears. The woman is sitting at a table with a smile and her right hand on her face. The background is green curtains with white patterns and white lace tulle curtains. There are light pink flowers on the table.\nThen, the camera switches to a young white man. The man is wearing a black T-shirt and sitting on the other side of the same table. The man is smoking and there is smoke in front of him.\nThe camera switches back to the woman. She puts her hands down, her face slightly serious, and her eyes occasionally look to the right of the camera.\nFinally, the camera zooms out. Two people are sitting on opposite sides of a table. The woman is on the right side of the screen, and the man is on the left side. The woman is about to get up and leave. There is an ashtray and some sundries on the table. In the background, a corner of a brown wooden table is revealed on the right side of the screen. There are photo frames and various decorations on the table.", "events": [ { "event": "At the beginning of the video, the camera is focused on a middle-aged white woman. The woman is wearing a rose-red shirt and a light pink cardigan. The woman has short black curly hair and white pearl earrings on her ears. The woman is sitting at a table with a smile and her right hand on her face. The background is green curtains with white patterns and white lace tulle curtains. 
There are light pink flowers on the table.", "visual_elements": [ { "content": "The camera is pointed at a middle-aged white woman.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a rose-red shirt and a light pink cardigan.", "type": "attribute", "weight": 3 }, { "content": "The woman has short black curly hair and white pearl earrings on her ears.", "type": "attribute", "weight": 2 }, { "content": "The woman is sitting at a table with a smile on her face, with her right hand on her face.", "type": "action", "weight": 3 }, { "content": "The background is green curtains with white patterns and white lace curtains.", "type": "scene", "weight": 3 }, { "content": "There are light pink flowers on the table.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to a young white man. The man is wearing a black T-shirt and sitting on the other side of the same table. The man is smoking and there is smoke in front of him.", "visual_elements": [ { "content": "Then, the camera switches to a young white man.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a black T-shirt and sitting on the other side of the same table.", "type": "attribute", "weight": 3 }, { "content": "The man is smoking, and there is smoke in front of him.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches back to the woman. She puts her hands down, her face slightly serious, and her eyes occasionally look to the right of the camera.", "visual_elements": [ { "content": "The camera switches back to the woman again.", "type": "camera", "weight": 3 }, { "content": "The woman puts her hand down, her face is a little serious, and her eyes occasionally look to the right of the camera.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera zooms out. Two people are sitting on opposite sides of a table. The woman is on the right side of the screen, and the man is on the left side. The woman is about to get up and leave. There is an ashtray and some sundries on the table. In the background, a corner of a brown wooden table is revealed on the right side of the screen. There are photo frames and various decorations on the table.", "visual_elements": [ { "content": "Finally, the camera pulls away.", "type": "camera", "weight": 3 }, { "content": "The two sit on both sides of the table.", "type": "attribute", "weight": 3 }, { "content": "The woman is on the right side of the picture, and the man is on the left side of the picture.", "type": "attribute", "weight": 3 }, { "content": "The woman is about to get up and leave.", "type": "action", "weight": 2 }, { "content": "There is an ashtray and some sundries on the table.", "type": "scene", "weight": 2 }, { "content": "In the background, a corner of a brown wooden table is exposed on the right side of the picture.", "type": "scene", "weight": 2 }, { "content": "There are photo frames and various decorations on the table.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0221", "video_path": "VELOCITI/v_7_ip79SGVLo_seg_110_120.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a middle-aged white male. The camera keeps a close-up shot. The man is wearing a white T-shirt and has a stethoscope around his neck. The man sits in a chair and leans back. 
The man wears a black watch on his left wrist and rests his elbow on the armrest of the chair. The man's fingers are sometimes placed on his cheek and sometimes on his mouth. Then, the man stands up and walks to the right of the screen. The background is a simple room. The walls are light green. There is a gray table on the right side of the screen. There is a gray phone on the table. There is also a brown chair next to the table.\nThen, the camera switches to a middle-aged black woman. The woman has brown hair and pink nails. The woman is wearing a black suit with a white shirt underneath. The woman is sitting behind a desk, holding a cigarette in her right hand and resting her elbow on the desk. There is a light green door on the right side of the screen, blending into the wall.\nFinally, there is another person in a grey suit standing at the doorway behind the woman. The white man is wearing light-coloured pants and a black handgun on his black belt. The white man walks past the woman, and the person at the door puts his left hand behind the man, and they leave the room together. The camera always focuses on the woman on the right side of the screen, and the only movement in the background is the two people leaving.", "events": [ { "event": "The video begins with the camera focusing on a middle-aged white male. The camera keeps a close-up shot. The man is wearing a white T-shirt and has a stethoscope around his neck. The man sits in a chair and leans back. The man wears a black watch on his left wrist and rests his elbow on the armrest of the chair. The man's fingers are sometimes placed on his cheek and sometimes on his mouth. Then, the man stands up and walks to the right of the screen. The background is a simple room. The walls are light green. There is a gray table on the right side of the screen. There is a gray phone on the table. There is also a brown chair next to the table.", "visual_elements": [ { "content": "The camera is focused on a middle-aged white male.", "type": "camera", "weight": 3 }, { "content": "The camera keeps a close-up shot.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a white T-shirt and a stethoscope around his neck.", "type": "attribute", "weight": 3 }, { "content": "The man is sitting on a chair, leaning back.", "type": "attribute", "weight": 2 }, { "content": "The man is wearing a black watch on his left wrist and his elbow is resting on the armrest of the chair.", "type": "attribute", "weight": 2 }, { "content": "The man's fingers are sometimes placed on his cheek and sometimes on his mouth.", "type": "action", "weight": 3 }, { "content": "Then, the man stands up and walks to the right of the screen.", "type": "action", "weight": 2 }, { "content": "The background is a simple room.", "type": "scene", "weight": 3 }, { "content": "The walls are light green.", "type": "scene", "weight": 2 }, { "content": "There is a gray table on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "There is a gray phone on the table.", "type": "scene", "weight": 1 }, { "content": "There is also a brown chair next to the table.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to a middle-aged black woman. The woman has brown hair and pink nails. The woman is wearing a black suit with a white shirt underneath. The woman is sitting behind a desk, holding a cigarette in her right hand and resting her elbow on the desk. 
There is a light green door on the right side of the screen, blending into the wall.", "visual_elements": [ { "content": "Then, the camera switches to a middle-aged black woman.", "type": "camera", "weight": 3 }, { "content": "The woman has brown hair and pink nails.", "type": "attribute", "weight": 3 }, { "content": "The woman is wearing a black suit with a white shirt underneath.", "type": "attribute", "weight": 3 }, { "content": "The woman is sitting behind a table, holding a cigarette in her right hand and resting her elbow on the table.", "type": "action", "weight": 3 }, { "content": "There is a light green door on the right side of the screen, blending into the wall.", "type": "scene", "weight": 3 } ] }, { "event": "Finally, there is another person in a grey suit standing at the doorway behind the woman. The white man is wearing light-coloured pants and a black handgun on his black belt. The white man walks past the woman, and the person at the door puts his left hand behind the man, and they leave the room together. The camera always focuses on the woman on the right side of the screen, and the only movement in the background is the two people leaving.", "visual_elements": [ { "content": "Another person in a gray suit is standing in the doorway behind the woman.", "type": "attribute", "weight": 1 }, { "content": "The white man is wearing light-colored pants and a black handgun on his black belt.", "type": "attribute", "weight": 2 }, { "content": "The white man walks past the woman.", "type": "action", "weight": 2 }, { "content": "The person at the door puts his left hand behind the man and they leave the room together.", "type": "action", "weight": 1 }, { "content": "The camera is always focused on the woman on the right side of the screen.", "type": "camera", "weight": 3 }, { "content": "The only movement in the background is the two people leaving.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0222", "video_path": "VELOCITI/v_7tUYeqOLuYU_seg_20_30.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video is an animation. At the beginning of the video, the camera is pointed at a hospital corridor. A young black male doctor in a white coat stands in the middle of the corridor with his back to the camera and a stack of documents in his hand. At the end of the corridor are two double yellow wooden doors. The walls of the corridor are light blue and white. There are two yellow wooden doors on the right side of the screen. There is a green \"EXIT\" sign on the wall on the right side of the screen, indicating the direction of the exit. There is also a white hand sanitizer dispenser on the wall on the right side of the screen. There is a yellow wooden chair on the ground on the left side of the screen.\nThen, an animal character wearing green surgical gown enters the frame from the right, crawling forward on the ground. The animal character has yellow skin, a green surgical cap, and gray overshoes. Then, as the animal character passes by the black doctor, it stands up and walks toward the double doors at the end of the corridor. The black doctor walks in the opposite direction of the character.\nFinally, the camera switches to another corridor. Another person wearing light green surgical gown and short yellow hair enters the screen from the left side with his back to the camera. 
The person with short yellow hair pushes a cart full of towels through the corridor and turns to the left side of the screen. There is a painting hanging on the wall at the end of the corridor on the right side of the screen. There is a yellow wooden door on the right side of the screen. There is a red fire extinguisher at the corner of the corridor on the right side of the screen.", "events": [ { "event": "The video is an animation. At the beginning of the video, the camera is pointed at a hospital corridor. A young black male doctor in a white coat stands in the middle of the corridor with his back to the camera and a stack of documents in his hand. At the end of the corridor are two double yellow wooden doors. The walls of the corridor are light blue and white. There are two yellow wooden doors on the right side of the screen. There is a green \"EXIT\" sign on the wall on the right side of the screen, indicating the direction of the exit. There is also a white hand sanitizer dispenser on the wall on the right side of the screen. There is a yellow wooden chair on the ground on the left side of the screen.", "visual_elements": [ { "content": "The video is an animation.", "type": "camera", "weight": 3 }, { "content": "The camera is pointed at a hospital corridor.", "type": "camera", "weight": 3 }, { "content": "A young black male doctor in a white coat stands in the middle of the corridor with his back to the camera.", "type": "attribute", "weight": 3 }, { "content": "The black doctor holds a stack of documents in his hand.", "type": "attribute", "weight": 2 }, { "content": "At the end of the corridor are two double-opening yellow wooden doors.", "type": "scene", "weight": 2 }, { "content": "The walls of the corridor are light blue and white.", "type": "scene", "weight": 2 }, { "content": "There are two yellow wooden doors on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "A green \"EXIT\" sign is hung on the wall on the right side of the screen, indicating the direction of the exit.", "type": "scene", "weight": 1 }, { "content": "There is also a white hand sanitizer dispenser on the wall on the right side of the screen.", "type": "scene", "weight": 1 }, { "content": "There is a yellow wooden chair on the ground on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Then, an animal character wearing green surgical gown enters the frame from the right, crawling forward on the ground. The animal character has yellow skin, a green surgical cap, and gray overshoes. Then, as the animal character passes by the black doctor, it stands up and walks toward the double doors at the end of the corridor. The black doctor walks in the opposite direction of the character.", "visual_elements": [ { "content": "Then, an animal character in a green surgical gown enters the screen from the right side.", "type": "attribute", "weight": 3 }, { "content": "The animal character lies on the ground. 
The animal character crawls forward on the screen.", "type": "action", "weight": 3 }, { "content": "The animal character has yellow skin, a green surgical cap, and grey overshoes.", "type": "attribute", "weight": 3 }, { "content": "Then, as the animal character passes the black doctor, it stands up and walks toward the double doors at the end of the corridor.", "type": "action", "weight": 3 }, { "content": "The black doctor walks in the opposite direction of the character.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera switches to another corridor. Another person wearing light green surgical gown and short yellow hair enters the screen from the left side with his back to the camera. The person with short yellow hair pushes a cart full of towels through the corridor and turns to the left side of the screen. There is a painting hanging on the wall at the end of the corridor on the right side of the screen. There is a yellow wooden door on the right side of the screen. There is a red fire extinguisher at the corner of the corridor on the right side of the screen.", "visual_elements": [ { "content": "Finally, the camera switches to another corridor.", "type": "camera", "weight": 3 }, { "content": "Another person wearing light green surgical gown and short yellow hair enters the screen from the left side of the screen with his back to the camera.", "type": "attribute", "weight": 3 }, { "content": "The person with short yellow hair pushes a cart full of towels through the corridor and turns to the left side of the screen.", "type": "action", "weight": 3 }, { "content": "There is a painting on the wall at the end of the corridor on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "There is a yellow wooden door on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "There is a red fire extinguisher at the corner of the corridor on the right side of the screen.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0223", "video_path": "VELOCITI/v_7wYMAJSnpVo_seg_15_25.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the face of a middle-aged white male with a beard, wearing a dark denim jacket and a blue shirt, standing on the street. There are some buildings and trees in the background.\nThe camera zooms out and switches to a scene where a man and a woman are talking. The camera is focused on the side of the two people. The man wears a silver round earring on his right ear. The young white woman on the right side of the screen has long hair, wears a loose green sweater and jeans, and carries a yellow bag on her shoulder. The woman smiles and extends her right hand to shake the man's hand. However, the man puts his hands in his pockets and turns to the left side of the screen. The background is a wall with light yellow and dark green. There are white and green graffiti on the wall. There is a black door on the right side of the screen. There is an electric meter box on the left side of the screen.\nThe camera switches to a close-up of the woman. The woman raises her hands helplessly. The woman wears a red bracelet on her right wrist. She wears a ring on each index finger. 
The ring on the woman's left hand is dark blue, and the ring on her right hand is silver.\nFinally, the camera zooms out again and follows the woman as she moves gradually to the left. The man walks to the car, opens the door and prepares to get in. The woman walks behind the man on the right side of the screen while swaying and talking.", "events": [ { "event": "At the beginning of the video, the camera focuses on the face of a middle-aged white male with a beard, wearing a dark denim jacket and a blue shirt, standing on the street. There are some buildings and trees in the background.", "visual_elements": [ { "content": "The camera focuses on the face of a middle-aged white male.", "type": "camera", "weight": 3 }, { "content": "The man has a beard and wears a dark denim jacket and a blue shirt.", "type": "attribute", "weight": 3 }, { "content": "The man stands on a street.", "type": "action", "weight": 3 }, { "content": "There are some buildings and trees in the background.", "type": "scene", "weight": 3 } ] }, { "event": "The camera zooms out and switches to a scene where a man and a woman are talking. The camera is focused on the side of the two people. The man wears a silver round earring on his right ear. The young white woman on the right side of the screen has long hair, wears a loose green sweater and jeans, and carries a yellow bag on her shoulder. The woman smiles and extends her right hand to shake the man's hand. However, the man puts his hands in his pockets and turns to the left side of the screen. The background is a wall with light yellow and dark green. There are white and green graffiti on the wall. There is a black door on the right side of the screen. There is an electric meter box on the left side of the screen.", "visual_elements": [ { "content": "The camera zooms out and switches to a scene where a man is talking to a woman.", "type": "camera", "weight": 3 }, { "content": "The camera is aimed at the side of the two.", "type": "camera", "weight": 2 }, { "content": "The man wears a silver round earring on his right ear.", "type": "attribute", "weight": 1 }, { "content": "The young white woman on the right side of the picture has shoulder-length hair and wears a loose green-patterned sweater and jeans.", "type": "attribute", "weight": 3 }, { "content": "The woman carries a yellow bag on her shoulder.", "type": "attribute", "weight": 1 }, { "content": "The woman smiles and extends her right hand to shake hands with the man.", "type": "action", "weight": 3 }, { "content": "However, the man puts his hands in his pockets and turns away. The woman walks away to the left side of the screen.", "type": "action", "weight": 3 }, { "content": "The background is a wall with light yellow and dark green.", "type": "scene", "weight": 3 }, { "content": "There are white and green graffiti on the wall.", "type": "scene", "weight": 1 }, { "content": "There is a black door on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "There is an electric meter box on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches to a close-up of the woman. The woman raises her hands helplessly. The woman wears a red bracelet on her right wrist. She wears a ring on each index finger. 
The ring on the woman's left hand is dark blue, and the ring on her right hand is silver.", "visual_elements": [ { "content": "The camera switches to a close-up of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman raises her hands helplessly.", "type": "action", "weight": 3 }, { "content": "The woman wears a red bracelet on her right wrist.", "type": "attribute", "weight": 1 }, { "content": "The woman wears a ring on each index finger.", "type": "attribute", "weight": 1 }, { "content": "The ring on the woman's left hand is dark blue, and the ring on the right hand is silver.", "type": "attribute", "weight": 1 } ] }, { "event": "Finally, the camera zooms out again and follows the woman as she moves gradually to the left. The man walks to the car, opens the door and prepares to get in. The woman walks behind the man on the right side of the screen while swaying and talking.", "visual_elements": [ { "content": "Finally, the camera zooms out again and follows the woman to gradually move to the left.", "type": "camera", "weight": 3 }, { "content": "The man walks to the car, opens the door and prepares to get in.", "type": "action", "weight": 3 }, { "content": "The woman walks behind the man on the right side of the screen while swaying and talking.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 23 }, { "index": "TUNA_0224", "video_path": "VELOCITI/v_85A2rWA5O3o_seg_120_130.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera zooming in on a puppy with brown and white fur walking on the floor toward the right of the frame. In the background is a blue children's chair and a yellow wooden cabinet with a pink towel hanging on it.\nThe camera moves with the dog's movements. The puppy jumps onto a white bed along the steps made of toys beside the bed. The bed is covered with white sheets with pink patterns. The camera is shot from the side, showing the puppy walking on the bed. In the background, there are white windows and curtains on the left side of the picture. Natural light comes in from the window. Small toys of various colors are placed on the wall on the right side of the picture.\nThen, the camera switches to a close-up. The camera first shoots from the side, then switches to an overhead shot. The puppy walks towards a little white girl sleeping on the bed, and then licks the girl's face and neck. The girl has short curly blond hair and is wearing pink pajamas.\nFinally, the camera switches to the girl being woken up and sitting up.The girl yawns and turns to pet the puppy.", "events": [ { "event": "The video begins with the camera zooming in on a puppy with brown and white fur walking on the floor toward the right of the frame. 
In the background is a blue children's chair and a yellow wooden cabinet with a pink towel hanging on it.", "visual_elements": [ { "content": "The camera is focused on a puppy.", "type": "camera", "weight": 3 }, { "content": "The puppy has brown and white fur.", "type": "attribute", "weight": 3 }, { "content": "The puppy is walking on the floor to the right of the screen.", "type": "action", "weight": 3 }, { "content": "There is a blue children's chair and a yellow wooden cabinet in the background.", "type": "scene", "weight": 2 }, { "content": "A pink towel is hung on the cabinet.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves with the dog's movements. The puppy jumps onto a white bed along the steps made of toys beside the bed. The bed is covered with white sheets with pink patterns. The camera is shot from the side, showing the puppy walking on the bed. In the background, there are white windows and curtains on the left side of the picture. Natural light comes in from the window. Small toys of various colors are placed on the wall on the right side of the picture.", "visual_elements": [ { "content": "The camera moves with the dog's movements.", "type": "camera", "weight": 3 }, { "content": "The puppy jumps onto a white bed along the steps made of toys beside the bed.", "type": "action", "weight": 3 }, { "content": "The bed is covered with white sheets with pink patterns.", "type": "scene", "weight": 3 }, { "content": "The camera is shot from the side, showing the puppy walking on the bed.", "type": "camera", "weight": 3 }, { "content": "In the background, there are white windows and curtains on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "Natural light comes in from the window.", "type": "scene", "weight": 1 }, { "content": "Small toys of various colors are placed against the wall on the right side of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to a close-up. The camera first shoots from the side, then switches to an overhead shot. The puppy walks towards a little white girl sleeping on the bed, and then licks the girl's face and neck. The girl has short curly blond hair and is wearing pink pajamas.", "visual_elements": [ { "content": "Then, the camera switches to a close-up.", "type": "camera", "weight": 3 }, { "content": "The camera shoots from the side first, then switches to a bird's-eye view.", "type": "camera", "weight": 2 }, { "content": "The puppy walks towards a little white girl sleeping on the bed, and then licks the girl's face and neck.", "type": "action", "weight": 3 }, { "content": "The girl has short curly blond hair and wears pink pajamas.", "type": "attribute", "weight": 3 } ] }, { "event": "Finally, the camera switches to the girl being woken up and sitting up.The girl yawns and turns to pet the puppy.", "visual_elements": [ { "content": "Finally, the camera switches to the girl being woken up and sitting up.", "type": "camera", "weight": 3 }, { "content": "The girl yawns and turns to pet the puppy.", "type": "action", "weight": 1 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0225", "video_path": "VELOCITI/v_88YBTmbAaoY_seg_15_25.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the glass of a telephone booth. 
There is an obvious bullet hole on the glass, and there are cracks all around. A middle-aged white man with a beard and wearing a purple shirt stands in the telephone booth, holding a phone. The man looks outside the booth nervously. The camera shoots from the side of the man.\nNext, the camera switches to a view through a scope. The camera shows a street with people walking to and fro. There is a white telephone booth in the center of the frame. Someone enters the booth from the right side of the frame. There is a building in the background. There are large advertisements on the windows of the building.\nThe camera switches back to the phone booth. The man looks around while talking on the phone. The camera first shoots the man's face and the interior of the phone booth from the side, and finally shoots the man's front from an upward angle.", "events": [ { "event": "At the beginning of the video, the camera focuses on the glass of a telephone booth. There is an obvious bullet hole on the glass, and there are cracks all around. A middle-aged white man with a beard and wearing a purple shirt stands in the telephone booth, holding a phone. The man looks outside the booth nervously. The camera shoots from the side of the man.", "visual_elements": [ { "content": "The camera focuses on the glass of a telephone booth.", "type": "camera", "weight": 3 }, { "content": "There is an obvious bullet hole on the glass, and there are cracks all around.", "type": "scene", "weight": 3 }, { "content": "A middle-aged white man with a beard and wearing a purple shirt stands in the booth, holding a phone.", "type": "attribute", "weight": 3 }, { "content": "The man looks nervously outside the booth.", "type": "action", "weight": 2 }, { "content": "The camera shoots from the side of the man.", "type": "camera", "weight": 3 } ] }, { "event": "Next, the camera switches to a view through a scope. The camera shows a street with people walking to and fro. There is a white telephone booth in the center of the frame. Someone enters the booth from the right side of the frame. There is a building in the background. There are large advertisements on the windows of the building.", "visual_elements": [ { "content": "Next, the camera switches to a perspective viewed through a scope.", "type": "camera", "weight": 3 }, { "content": "The camera shows a street with pedestrians coming and going.", "type": "camera", "weight": 3 }, { "content": "There is a white telephone booth in the center of the screen.", "type": "attribute", "weight": 1 }, { "content": "Someone enters the booth from the right side of the screen.", "type": "action", "weight": 1 }, { "content": "There is a building in the background.", "type": "scene", "weight": 3 }, { "content": "There are large advertisements on the windows of the building.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches back to the phone booth. The man looks around while talking on the phone. 
The camera first shoots the man's face and the interior of the phone booth from the side, and finally shoots the man's front from an upward angle.", "visual_elements": [ { "content": "The camera switches back to the booth again.", "type": "camera", "weight": 3 }, { "content": "The man looks around while talking on the phone.", "type": "action", "weight": 2 }, { "content": "The camera first shoots the man's face and the interior of the booth from the side.", "type": "camera", "weight": 3 }, { "content": "Finally, the camera shoots the man's front from an upward angle.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0226", "video_path": "VELOCITI/v_8_MpC8PcPQ0_seg_25_35.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a close-up of a hand. The hand is clenched into a fist, with the words \"I must not tell lies\" engraved in red on the back of the hand. The wrist is covered with dark clothing. The background is a pink cloth with floral patterns embroidered on it.\nNext, the camera switches to a young white male. The man is wearing round-framed glasses and a dark blue robe with a white shirt and a brown striped tie. The man is sitting on an ornate chair. The man holds a brown feather pen in his hand and makes a gesture of writing in the book in front of him. The background is a room with exquisite vintage decoration. On the wall on the left side of the picture are several decorative plates with cat patterns. In front of the decorative plates is a delicate pink chair. Next to the decorative plates, the curtains in the room are pink with patterns.\nThe camera gradually moves to the upper right. Behind the man is an old white woman in pink clothes. The woman has short curly hair. The woman turns around and walks towards the man in the foreground with doubt. The woman stands behind the man to the right and looks at him. On the right side of the screen is a table full of delicate flower pots. There are pink flowers in the flower pots. There is a yellow gauze curtain in front of the table, and a ray of natural light shines through the gauze curtain.", "events": [ { "event": "The video begins with a close-up of a hand. The hand is clenched into a fist, with the words \"I must not tell lies\" engraved in red on the back of the hand. The wrist is covered with dark clothing. The background is a pink cloth with floral patterns embroidered on it.", "visual_elements": [ { "content": "The camera zooms in on a hand.", "type": "camera", "weight": 3 }, { "content": "The hand is clenched into a fist, and the words \"I must not tell lies\" are engraved in red on the back of the hand.", "type": "attribute", "weight": 3 }, { "content": "The wrist is covered with dark clothing.", "type": "attribute", "weight": 2 }, { "content": "The background is a pink cloth embroidered with a floral pattern.", "type": "scene", "weight": 3 } ] }, { "event": "Next, the camera switches to a young white male. The man is wearing round-framed glasses and a dark blue robe with a white shirt and a brown striped tie. The man is sitting on an ornate chair. The man holds a brown feather pen in his hand and makes a gesture of writing in the book in front of him. The background is a room with exquisite vintage decoration. On the wall on the left side of the picture are several decorative plates with cat patterns. 
In front of the decorative plates is a delicate pink chair. Next to the decorative plates, the curtains in the room are pink with patterns.", "visual_elements": [ { "content": "Next, the camera switches to a young white male.", "type": "camera", "weight": 3 }, { "content": "The man is wearing round-framed glasses, a dark blue robe, a white shirt and a brown striped tie.", "type": "attribute", "weight": 3 }, { "content": "The man is sitting on an ornately decorated chair.", "type": "action", "weight": 3 }, { "content": "The man is holding a brown feather pen and posing as if writing on a notebook in front of him.", "type": "action", "weight": 2 }, { "content": "The background is a room with exquisite retro decorations.", "type": "scene", "weight": 3 }, { "content": "On the wall on the left side of the picture are several decorative plates with cat patterns.", "type": "scene", "weight": 2 }, { "content": "In front of the decorative plates is an exquisite pink chair.", "type": "scene", "weight": 1 }, { "content": "Next to the decorative plates, the curtains in the room are pink with patterns.", "type": "scene", "weight": 2 } ] }, { "event": "The camera gradually moves to the upper right. Behind the man is an old white woman in pink clothes. The woman has short curly hair. The woman turns around and walks towards the man in the foreground with doubt. The woman stands behind the man to the right and looks at him. On the right side of the screen is a table full of delicate flower pots. There are pink flowers in the flower pots. There is a yellow gauze curtain in front of the table, and a ray of natural light shines through the gauze curtain.", "visual_elements": [ { "content": "The camera gradually moves to the upper right.", "type": "camera", "weight": 3 }, { "content": "Behind the man is an elderly white woman in pink clothes.", "type": "attribute", "weight": 3 }, { "content": "The woman has short curly hair.", "type": "attribute", "weight": 1 }, { "content": "The woman turns around and walks toward the man in the foreground in confusion.", "type": "action", "weight": 3 }, { "content": "The woman stands behind the man to the right and looks at him.", "type": "action", "weight": 2 }, { "content": "On the right side of the picture is a table full of exquisite flower pots.", "type": "scene", "weight": 2 }, { "content": "The flower pots contain pink flowers.", "type": "scene", "weight": 1 }, { "content": "A yellow gauze curtain is drawn in front of the table, and a ray of natural light shines through the curtain.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0227", "video_path": "VELOCITI/v_8bY4qPadkSo_seg_50_60.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a kitchen scene. In the foreground is a middle-aged white woman with blond hair. The woman has her hair down on her shoulders and is wearing a light-colored sweater. The woman has her left hand on her cheek. As the woman speaks, her eyes look sadly toward the bottom right of the screen. The camera focuses mainly on the woman's face. In the background is a young white man wearing a gray sweater. The man is standing by the kitchen sink doing some household chores. The kitchen background includes cabinets, windows, and some kitchen utensils.\nThen, the camera focuses on the man. He is busy in the kitchen. 
After hearing the shocking news, he quickly walks to the woman on the left side of the screen. The man is bent over and speaks anxiously. In the background, natural light shines in from the window. There is a brown wooden cabinet on the kitchen wall. There are some bottles and jars on the cabinet, and there are some unwashed dishes in the sink.\nFinally, the camera focuses on the woman again. The woman puts her left hand to her mouth and her face trembles.", "events": [ { "event": "The video begins with the camera focusing on a kitchen scene. In the foreground is a middle-aged white woman with blond hair. The woman has her hair down on her shoulders and is wearing a light-colored sweater. The woman has her left hand on her cheek. As the woman speaks, her eyes look sadly toward the bottom right of the screen. The camera focuses mainly on the woman's face. In the background is a young white man wearing a gray sweater. The man is standing by the kitchen sink doing some household chores. The kitchen background includes cabinets, windows, and some kitchen utensils.", "visual_elements": [ { "content": "The camera focuses on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "In the foreground is a middle-aged white woman with blond hair.", "type": "attribute", "weight": 3 }, { "content": "The woman has her hair down on her shoulders and is wearing a light-colored sweater.", "type": "attribute", "weight": 2 }, { "content": "The woman has her left hand on her cheek.", "type": "action", "weight": 3 }, { "content": "As the woman speaks, her eyes look sadly toward the bottom right of the screen.", "type": "action", "weight": 3 }, { "content": "The camera focuses mainly on the woman's face.", "type": "camera", "weight": 3 }, { "content": "In the background is a young white man wearing a gray sweater.", "type": "scene", "weight": 3 }, { "content": "The man is standing by the kitchen sink doing some household chores.", "type": "action", "weight": 3 }, { "content": "The kitchen background includes cabinets, windows, and some kitchen utensils.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera focuses on the man. He is busy in the kitchen. After hearing the shocking news, he quickly walks to the woman on the left side of the screen. The man is bent over and speaks anxiously. In the background, natural light shines in from the window. There is a brown wooden cabinet on the kitchen wall. There are some bottles and jars on the cabinet, and there are some unwashed dishes in the sink.", "visual_elements": [ { "content": "Then, the camera focuses on the man.", "type": "camera", "weight": 3 }, { "content": "The man is busy in the kitchen.", "type": "action", "weight": 3 }, { "content": "After hearing the shocking news, the man quickly walks to the woman on the left side of the screen.", "type": "action", "weight": 3 }, { "content": "The man is bent over and speaks anxiously.", "type": "action", "weight": 3 }, { "content": "In the background, there is natural light coming in from the window.", "type": "scene", "weight": 1 }, { "content": "There is a brown wooden cabinet on the kitchen wall.", "type": "scene", "weight": 2 }, { "content": "There are some bottles and jars on the cabinet.", "type": "scene", "weight": 1 }, { "content": "There are some unwashed dishes in the sink.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera focuses on the woman again. 
The woman puts her left hand to her mouth and her face trembles.", "visual_elements": [ { "content": "Finally, the camera focuses on the woman again.", "type": "camera", "weight": 3 }, { "content": "The woman puts her left hand to her mouth and her face trembles.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0228", "video_path": "VELOCITI/v_8nOx6uj46XQ_seg_25_35.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a kitchen scene. There are three Asian men in the picture. The middle-aged man in the front right of the picture is wearing a white chef's uniform and busy at the workbench. Two men in the back are talking. The elderly man on the far right of the picture has gray hair and wears silver-framed glasses. The elderly man is wearing a black coat and a white inner wear. After the elderly man finishes talking, he walks out of the picture to the right. Another middle-aged man in the middle of the picture is also wearing a white chef's uniform and silver-framed glasses. There are some plastic containers with ingredients on the wooden workbench in the kitchen. The background of the kitchen is a light-colored tiled wall. There are some hooks and kitchen utensils on the wall. There is also a faucet and a stainless steel sink on the right side of the picture.\nNext, the camera switches to a close-up of the ingredients. The screen shows a transparent plastic box containing the ingredients. Four long pieces of food are soaked in yellow water in the box. The background is a silver stainless steel tabletop. There are other kitchen boxes next to the box.\nThe camera moves up and switches back to the kitchen scene. The only person left in the picture is the middle-aged man who was busy just now. The man lowers his head and focuses on the work in his hands. The background becomes blurred. There are some sundries on the ground on the left side of the picture.", "events": [ { "event": "At the beginning of the video, the camera is focused on a kitchen scene. There are three Asian men in the picture. The middle-aged man in the front right of the picture is wearing a white chef's uniform and busy at the workbench. Two men in the back are talking. The elderly man on the far right of the picture has gray hair and wears silver-framed glasses. The elderly man is wearing a black coat and a white inner wear. After the elderly man finishes talking, he walks out of the picture to the right. Another middle-aged man in the middle of the picture is also wearing a white chef's uniform and silver-framed glasses. There are some plastic containers with ingredients on the wooden workbench in the kitchen. The background of the kitchen is a light-colored tiled wall. There are some hooks and kitchen utensils on the wall. 
There is also a faucet and a stainless steel sink on the right side of the picture.", "visual_elements": [ { "content": "The camera is pointed at a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "There are three Asian men in the picture.", "type": "attribute", "weight": 3 }, { "content": "The middle-aged man in the front right of the picture is wearing a white chef's uniform.", "type": "attribute", "weight": 3 }, { "content": "The middle-aged man is busy at the operating table.", "type": "action", "weight": 3 }, { "content": "The two men in the back are talking.", "type": "action", "weight": 2 }, { "content": "The old man on the far right of the picture has gray hair and wears silver-framed glasses.", "type": "attribute", "weight": 2 }, { "content": "The old man is wearing a black coat and a white inner wear.", "type": "attribute", "weight": 2 }, { "content": "The old man walks out of the picture to the right after talking.", "type": "action", "weight": 1 }, { "content": "Another middle-aged man in the middle of the picture is also wearing a white chef's uniform and silver-framed glasses.", "type": "attribute", "weight": 2 }, { "content": "There are some plastic containers with ingredients on the wooden workbench in the kitchen.", "type": "scene", "weight": 1 }, { "content": "The background of the kitchen is a light-colored tile wall.", "type": "scene", "weight": 2 }, { "content": "There are some hooks and kitchen utensils on the wall.", "type": "scene", "weight": 1 }, { "content": "There is also a faucet and a stainless steel sink on the right side of the picture.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera switches to a close-up of the ingredients. The screen shows a transparent plastic box containing the ingredients. Four long pieces of food are soaked in yellow water in the box. The background is a silver stainless steel tabletop. There are other kitchen boxes next to the box.", "visual_elements": [ { "content": "Next, the camera switches to a close-up of ingredients.", "type": "camera", "weight": 3 }, { "content": "The picture shows a transparent plastic box containing ingredients.", "type": "attribute", "weight": 3 }, { "content": "Four long pieces of food are soaked in the yellow water in the box.", "type": "attribute", "weight": 3 }, { "content": "There are other kitchen boxes next to the box.", "type": "attribute", "weight": 1 }, { "content": "The background is a silver stainless steel tabletop.", "type": "scene", "weight": 3 } ] }, { "event": "The camera moves up and switches back to the kitchen scene. The only person left in the picture is the middle-aged man who was busy just now. The man lowers his head and focuses on the work in his hands. The background becomes blurred. 
There are some sundries on the ground on the left side of the picture.", "visual_elements": [ { "content": "The camera moves up and switches back to the kitchen scene.", "type": "camera", "weight": 3 }, { "content": "The only thing left in the picture is the middle-aged man who was busy just now.", "type": "attribute", "weight": 3 }, { "content": "The man lowers his head and focuses on the work in his hands.", "type": "action", "weight": 3 }, { "content": "The background becomes blurred.", "type": "scene", "weight": 3 }, { "content": "There are some sundries on the ground on the left side of the picture.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0229", "video_path": "VELOCITI/v_8nOx6uj46XQ_seg_55_65.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the hands of a person wearing a white chef's uniform. A pair of hands is tying a belt of a white chef's uniform. In the background, another person wearing similar clothes is busy at the kitchen counter with his back to the camera.\nNext, the camera switches to the entrance of a restaurant. An older Asian man in a white chef's uniform stands at the door. The man has his hands behind his back and his body sways from time to time. The man is bald and wears glasses. The door frame of the restaurant is mainly brown wood. There are a few paintings on the wall of the restaurant, and the floor is clean and tidy. There is a sign with traditional Chinese characters on the right side of the screen.\nThen, the camera switches to the kitchen. The camera is shot from the side, showing the old man drinking water. Then, the old man puts down the cup of water and turns to look at the other side of the kitchen. In the background, another chef stands at the kitchen counter. A wooden steamer is hung on the white tile wall of the kitchen. There is a white sink on the right side of the screen.\nFinally, the camera closes up the old man's face. There is a wooden frame wall in the background.", "events": [ { "event": "At the beginning of the video, the camera focuses on the hands of a person wearing a white chef's uniform. A pair of hands is tying a belt of a white chef's uniform. In the background, another person wearing similar clothes is busy at the kitchen counter with his back to the camera.", "visual_elements": [ { "content": "The camera focuses on the hands of a person wearing a white chef's uniform.", "type": "camera", "weight": 3 }, { "content": "A pair of hands are tying a belt of a white chef's uniform.", "type": "action", "weight": 3 }, { "content": "Another person in similar clothing in the background has his back to the camera.", "type": "scene", "weight": 3 }, { "content": "The person in the background is busy at the kitchen counter.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera switches to the entrance of a restaurant. An older Asian man in a white chef's uniform stands at the door. The man has his hands behind his back and his body sways from time to time. The man is bald and wears glasses. The door frame of the restaurant is mainly brown wood. There are a few paintings on the wall of the restaurant, and the floor is clean and tidy. 
There is a sign with traditional Chinese characters on the right side of the screen.", "visual_elements": [ { "content": "Next, the camera switches to the entrance of a restaurant.", "type": "scene", "weight": 3 }, { "content": "An older Asian man in a white chef's uniform stands at the door.", "type": "attribute", "weight": 3 }, { "content": "The man has his hands behind his back and his body sways from time to time.", "type": "action", "weight": 3 }, { "content": "The man is bald and wears glasses.", "type": "attribute", "weight": 2 }, { "content": "The door frame of the restaurant is mainly brown wood.", "type": "scene", "weight": 2 }, { "content": "There are several paintings hanging on the walls of the restaurant.", "type": "scene", "weight": 1 }, { "content": "The floor of the restaurant is clean and tidy.", "type": "scene", "weight": 1 }, { "content": "There is a sign with traditional Chinese characters on the right side of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to the kitchen. The camera is shot from the side, showing the old man drinking water. Then, the old man puts down the cup of water and turns to look at the other side of the kitchen. In the background, another chef stands at the kitchen counter. A wooden steamer is hung on the white tile wall of the kitchen. There is a white sink on the right side of the screen.", "visual_elements": [ { "content": "Then, the camera switches to the kitchen.", "type": "camera", "weight": 3 }, { "content": "The camera shoots from the side, showing the old man drinking water.", "type": "camera", "weight": 2 }, { "content": "Then, the old man puts down the cup of water and turns to look at the other side of the kitchen.", "type": "action", "weight": 2 }, { "content": "Another chef in the background stands at the kitchen counter.", "type": "scene", "weight": 2 }, { "content": "A wooden steamer is hung on the white tile wall of the kitchen.", "type": "scene", "weight": 2 }, { "content": "There is a white sink on the right side of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera closes up the old man's face. There is a wooden frame wall in the background.", "visual_elements": [ { "content": "Finally, the camera closes up the old man's face.", "type": "camera", "weight": 3 }, { "content": "The background is a wooden frame wall.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_0230", "video_path": "VELOCITI/v_9-cPWheNyaA_seg_90_100.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on an open meadow. On the left side of the frame, a brown horse is lying on the ground. Next to the horse lies a middle-aged white man wearing a light brown coat, dark brown pants and a black hat. The man has just fallen off the horse. On the right side of the frame is another brown horse. On the horse is a middle-aged white man wearing light-colored clothes. The horse on the right side of the frame is slowly moving forward, approaching the fallen man. The weather is clear. There is a forest in the background.\nAs the camera switches, the picture focuses on the man who has fallen. The man is trying to get up from the fallen horse. The man's movements are somewhat laborious, and his arms are on the ground to support his body. There is a large rock behind the man. 
The trees in the background are tall and thin. The leaves are a mixture of green and yellow.\nFinally, the camera switches to a man on horseback. The man is wearing a light-colored hat. The man is riding his horse listlessly toward the left side of the screen.", "events": [ { "event": "The video begins with the camera focused on an open meadow. On the left side of the frame, a brown horse is lying on the ground. Next to the horse lies a middle-aged white man wearing a light brown coat, dark brown pants and a black hat. The man has just fallen off the horse. On the right side of the frame is another brown horse. On the horse is a middle-aged white man wearing light-colored clothes. The horse on the right side of the frame is slowly moving forward, approaching the fallen man. The weather is clear. There is a forest in the background.", "visual_elements": [ { "content": "The camera is focused on an open meadow.", "type": "camera", "weight": 3 }, { "content": "There is a brown horse lying on the left side of the screen.", "type": "attribute", "weight": 2 }, { "content": "Next to the horse lies a middle-aged white man wearing a light brown coat, dark brown pants and a black hat.", "type": "attribute", "weight": 3 }, { "content": "The man has just fallen off the horse.", "type": "action", "weight": 1 }, { "content": "There is another brown horse on the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "A middle-aged white man wearing light-colored clothes is riding on the horse.", "type": "attribute", "weight": 1 }, { "content": "The horse on the right side of the screen is slowly moving forward, approaching the fallen man.", "type": "action", "weight": 3 }, { "content": "The weather is clear.", "type": "scene", "weight": 2 }, { "content": "There is a forest in the background.", "type": "scene", "weight": 3 } ] }, { "event": "As the camera switches, the picture focuses on the man who has fallen. The man is trying to get up from the fallen horse. The man's movements are somewhat laborious, and his arms are on the ground to support his body. There is a large rock behind the man. The trees in the background are tall and thin. The leaves are a mixture of green and yellow.", "visual_elements": [ { "content": "As the camera switches, the picture focuses on the fallen man.", "type": "camera", "weight": 3 }, { "content": "The man is trying to get up from the fallen horse.", "type": "action", "weight": 3 }, { "content": "The man's movements are a bit laborious, and his arms are on the ground to support his body.", "type": "action", "weight": 3 }, { "content": "There is a large rock behind the man.", "type": "scene", "weight": 2 }, { "content": "The trees in the background are tall and thin.", "type": "scene", "weight": 1 }, { "content": "The leaves are a mixture of green and yellow tones.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera switches to a man on horseback. The man is wearing a light-colored hat. 
The man is riding his horse listlessly toward the left side of the screen.", "visual_elements": [ { "content": "Finally, the camera switches to the man on horseback.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a light-colored hat.", "type": "attribute", "weight": 2 }, { "content": "The man is riding his horse listlessly towards the left side of the screen.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0231", "video_path": "VELOCITI/v_96dlvQbYEds_seg_20_30.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera pointed at the counter of a musical instrument store. A middle-aged man in a blue shirt and brown pants stands behind the counter. In front of the counter stands a young man in a red plaid shirt and blue jeans. The young man looks down at the camera in the counter. In the background, guitars are hung on the right side of the screen. Many guns are hung on the left side of the screen. A clock is hung on the wall behind the middle-aged man.\nThen, the camera switches to a surveillance camera above the counter. The camera swings left and right. There are several dark blue drums on the left side of the screen. There are some brass instruments, such as trombones and saxophones, hanging on the wall on the right side of the screen. There is a green wall below the camera. Various styles of watches are hung on the green wall.\nThen, the camera switches back to the back of the middle-aged man behind the counter. The middle-aged man takes out a camera from the counter and hands it to the young man in front of the counter. The young man takes the camera and examines it carefully. Finally, the middle-aged man lies on the counter and talks to the young man. The lighting in the store mainly comes from several lights on the ceiling. Various accessories are placed on the brown wooden shelves in the background.", "events": [ { "event": "The video begins with the camera pointed at the counter of a musical instrument store. A middle-aged man in a blue shirt and brown pants stands behind the counter. In front of the counter stands a young man in a red plaid shirt and blue jeans. The young man looks down at the camera in the counter. In the background, guitars are hung on the right side of the screen. Many guns are hung on the left side of the screen. A clock is hung on the wall behind the middle-aged man.", "visual_elements": [ { "content": "The camera is pointed at the counter of a musical instrument store.", "type": "camera", "weight": 3 }, { "content": "A middle-aged man in a blue shirt and brown pants stands behind the counter.", "type": "attribute", "weight": 3 }, { "content": "A young man in a red plaid shirt and blue jeans stands in front of the counter.", "type": "attribute", "weight": 3 }, { "content": "The young man looks down at the camera in the counter.", "type": "action", "weight": 3 }, { "content": "In the background, guitars are hung on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "Many guns are hung on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "A clock is hung on the wall behind the middle-aged man.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to a surveillance camera above the counter. The camera swings left and right. 
There are several dark blue drums on the left side of the screen. There are some brass instruments, such as trombones and saxophones, hanging on the wall on the right side of the screen. There is a green wall below the camera. Various styles of watches are hung on the green wall.", "visual_elements": [ { "content": "Then, the camera switches to a surveillance camera above the counter.", "type": "camera", "weight": 3 }, { "content": "The camera swings left and right.", "type": "action", "weight": 3 }, { "content": "There are several dark blue drums on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "Some brass instruments, such as trombones and saxophones, are hung on the wall on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "There is a green wall under the camera.", "type": "scene", "weight": 2 }, { "content": "Watches of various styles are hung on the green wall.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches back to the back of the middle-aged man behind the counter. The middle-aged man takes out a camera from the counter and hands it to the young man in front of the counter. The young man takes the camera and examines it carefully. Finally, the middle-aged man lies on the counter and talks to the young man. The lighting in the store mainly comes from several lights on the ceiling. Various accessories are placed on the brown wooden shelves in the background.", "visual_elements": [ { "content": "Then, the camera switches back to the back of the middle-aged man behind the counter.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man takes a camera from the counter and hands it to the young man in front of the counter.", "type": "action", "weight": 3 }, { "content": "The young man takes the camera and examines it carefully.", "type": "action", "weight": 2 }, { "content": "Finally, the middle-aged man lies on the counter and talks to the young man.", "type": "action", "weight": 1 }, { "content": "The lighting in the store mainly comes from several lights on the ceiling.", "type": "scene", "weight": 1 }, { "content": "Various accessories are placed on the brown wooden shelves in the background.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0232", "video_path": "VELOCITI/v_9BHXzftnFGA_seg_100_110.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a middle-aged white man and woman who are kissing. The woman has curly medium-length hair and wears a red coat. The man has a beard and wears a black coat. The background is a corner of the corridor of the room. The wall on the right side of the screen is striped with dark brown and light brown. There is a painting hanging on the wall at the end of the corridor on the left side of the screen.\nNext, the camera moves to the left with the woman's movements. The woman is wearing a light brown glove. The woman releases her hands from holding the man's face, then turns and walks to the end of the corridor. Then, the woman opens the white door on the right side of the screen. The man follows her, opens the door, and watches the woman go out.\nFinally, the camera switches to a new corridor scene. The woman walks in the corridor with her back to the camera. 
The pink belt on the woman's waist is long enough to drag on the ground. The white walls on both sides of the corridor are equipped with exquisite wall lamps, which illuminate the entire corridor. There is an arch at the end of the corridor.", "events": [ { "event": "At the beginning of the video, the camera is focused on a middle-aged white man and woman who are kissing. The woman has curly medium-length hair and wears a red coat. The man has a beard and wears a black coat. The background is a corner of the corridor of the room. The wall on the right side of the screen is striped with dark brown and light brown. There is a painting hanging on the wall at the end of the corridor on the left side of the screen.", "visual_elements": [ { "content": "The camera is focused on a middle-aged white man and woman who are kissing.", "type": "camera", "weight": 3 }, { "content": "The woman has curly medium-length hair and wears a red coat.", "type": "attribute", "weight": 3 }, { "content": "The man has a beard and wears a black coat.", "type": "attribute", "weight": 3 }, { "content": "The background is a corner of the corridor in the room.", "type": "scene", "weight": 3 }, { "content": "The wall on the right side of the screen is in the shape of dark brown and light brown stripes.", "type": "scene", "weight": 2 }, { "content": "There is a painting hanging on the wall at the end of the corridor on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera moves to the left with the woman's movements. The woman is wearing a light brown glove. The woman releases her hands from holding the man's face, then turns and walks to the end of the corridor. Then, the woman opens the white door on the right side of the screen. The man follows her, opens the door, and watches the woman go out.", "visual_elements": [ { "content": "Next, the camera moves to the left with the woman's movements.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a light brown glove.", "type": "attribute", "weight": 1 }, { "content": "The woman loosens her hands holding the man's face, then turns and walks to the end of the corridor.", "type": "action", "weight": 3 }, { "content": "Then, the woman opens the white door on the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The man follows her, opens the door and watches the woman go out.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera switches to a new corridor scene. The woman walks in the corridor with her back to the camera. The pink belt on the woman's waist is long enough to drag on the ground. The white walls on both sides of the corridor are equipped with exquisite wall lamps, which illuminate the entire corridor. 
There is an arch at the end of the corridor.", "visual_elements": [ { "content": "Finally, the camera switches to a new corridor scene.", "type": "camera", "weight": 3 }, { "content": "The woman walks in the corridor with her back to the camera.", "type": "action", "weight": 3 }, { "content": "The pink belt on the woman's waist is long enough to drag on the ground.", "type": "attribute", "weight": 2 }, { "content": "The white walls on both sides of the corridor are equipped with exquisite wall lamps.", "type": "scene", "weight": 2 }, { "content": "The wall lamps illuminate the entire corridor.", "type": "scene", "weight": 1 }, { "content": "There is an arch at the end of the corridor.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0233", "video_path": "VELOCITI/v_9BHXzftnFGA_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is pointed at a half-open door. The main characters in the picture are a middle-aged white man and woman. A woman in a red coat has her back to the camera. The woman has a pink belt tied around her waist and her curly black hair is scattered on her shoulders. On the right side of the picture, a man in a black coat stands in the corridor with his back to the camera. The woman holds the door with her right hand and stretches her head into the door. The man watches the woman's movements in front of him. There are movie posters hanging on the white wall on the right side of the picture. There is a full-length mirror with a gold frame on the left side of the picture. A white scarf is hung on the mirror.\nThen, the woman wearing sunglasses turns around, closes the door and says a few words to the man.Then, the woman anxiously runs out of the picture to the left.The man has a beard and his eyes follow the direction of the woman's movement。\nThe camera follows the woman's movements and switches to a new scene. The woman is wearing light-colored gloves. The woman pushes open the mirror door in the center of the screen, turns and runs to the right of the screen. The walls of this room have dark brown and light brown striped wallpaper. There is a brown wooden table on the right side of the screen. There is a table lamp and some decorations on the table. The mirror in the middle of the screen reflects a brown leather sofa and a black poster on the wall in the depths of the room.", "events": [ { "event": "At the beginning of the video, the camera is pointed at a half-open door. The main characters in the picture are a middle-aged white man and woman. A woman in a red coat has her back to the camera. The woman has a pink belt tied around her waist and her curly black hair is scattered on her shoulders. On the right side of the picture, a man in a black coat stands in the corridor with his back to the camera. The woman holds the door with her right hand and stretches her head into the door. The man watches the woman's movements in front of him. There are movie posters hanging on the white wall on the right side of the picture. There is a full-length mirror with a gold frame on the left side of the picture. 
A white scarf is hung on the mirror.", "visual_elements": [ { "content": "The camera is pointed at a half-open door.", "type": "camera", "weight": 3 }, { "content": "The main characters in the picture are a middle-aged white man and woman.", "type": "camera", "weight": 3 }, { "content": "A woman in a red coat has her back to the camera.", "type": "attribute", "weight": 3 }, { "content": "The woman has a pink belt tied around her waist and her curly black hair is scattered on her shoulders.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the picture, a man in a black coat stands in the hallway with his back to the camera.", "type": "attribute", "weight": 3 }, { "content": "The woman holds the door with her right hand and stretches her head into the door.", "type": "action", "weight": 3 }, { "content": "The man watches the woman's movements in front of him.", "type": "action", "weight": 2 }, { "content": "There is a movie poster hanging on the white wall on the right side of the picture.", "type": "scene", "weight": 1 }, { "content": "There is a full-length mirror with a gold frame on the left side of the picture.", "type": "scene", "weight": 1 }, { "content": "A white scarf is hung on the mirror.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the woman wearing sunglasses turns around, closes the door and says a few words to the man. Then, the woman anxiously runs out of the picture to the left. The man has a beard and his eyes follow the direction of the woman's movement.", "visual_elements": [ { "content": "Then, the woman wearing sunglasses turns around, closes the door and says a few words to the man.", "type": "action", "weight": 3 }, { "content": "Then, the woman anxiously runs out of the picture to the left.", "type": "action", "weight": 3 }, { "content": "The man has a beard and his eyes follow the direction of the woman's movement.", "type": "action", "weight": 3 } ] }, { "event": "The camera follows the woman's movements and switches to a new scene. The woman is wearing light-colored gloves. The woman pushes open the mirror door in the center of the screen, turns and runs to the right of the screen. The walls of this room have dark brown and light brown striped wallpaper. There is a brown wooden table on the right side of the screen. There is a table lamp and some decorations on the table. 
The mirror in the middle of the screen reflects a brown leather sofa and a black poster on the wall in the depths of the room.", "visual_elements": [ { "content": "The camera follows the woman's movements and switches to a new scene.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing light-colored gloves.", "type": "attribute", "weight": 1 }, { "content": "The woman pushes open the mirror door in the center of the screen, turns and runs to the right of the screen.", "type": "action", "weight": 3 }, { "content": "The walls of this room have dark brown and light brown striped wallpaper.", "type": "scene", "weight": 3 }, { "content": "There is a brown wooden table on the right side of the screen.", "type": "scene", "weight": 1 }, { "content": "There is a table lamp and some decorations on the table.", "type": "scene", "weight": 1 }, { "content": "The mirror in the middle of the screen reflects a brown leather sofa and a black poster on the wall in the depths of the room.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0234", "video_path": "VELOCITI/v_9D5WsQNIAcE_seg_50_60.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video is in black and white. At the beginning of the video, the screen shows a scene in a hospital room. There are four people in the room. There is a patient lying on the bed. There is a middle-aged white doctor in a white coat on the right side of the screen. The doctor opens the door and walks in. In the middle of the screen, there is an older white man in a black coat standing next to the bed. The older man holds a cane and a hat in his left hand. In front of the older man on the left side of the screen is a young woman wearing a fur collar coat and a hat. The older man pulls out a chair next to the bed for the woman to sit down. There is a screen on the left side of the screen. Next to the screen is a small table. There is also a chair in the foreground on the left side of the screen.\nThen, the older man turns and walks toward the doctor on the right side of the screen. The doctor stands at the table and looks over the documents. The woman sits by the bed, leaning forward to comfort the patient.\nFinally, the camera switches to the back of the woman sitting. The patient is covered with a dark blanket. The woman's short hair is yellow and curly. The woman almost climbs onto the bed.", "events": [ { "event": "The video is in black and white. At the beginning of the video, the screen shows a scene in a hospital room. There are four people in the room. There is a patient lying on the bed. There is a middle-aged white doctor in a white coat on the right side of the screen. The doctor opens the door and walks in. In the middle of the screen, there is an older white man in a black coat standing next to the bed. The older man holds a cane and a hat in his left hand. In front of the older man on the left side of the screen is a young woman wearing a fur collar coat and a hat. The older man pulls out a chair next to the bed for the woman to sit down. There is a screen on the left side of the screen. Next to the screen is a small table. 
There is also a chair in the foreground on the left side of the screen.", "visual_elements": [ { "content": "The video is in black and white.", "type": "camera", "weight": 3 }, { "content": "The screen shows a ward scene.", "type": "scene", "weight": 3 }, { "content": "There are four people in the room.", "type": "attribute", "weight": 3 }, { "content": "There is a patient lying on the bed.", "type": "attribute", "weight": 1 }, { "content": "There is a middle-aged white doctor in a white coat on the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The doctor opens the door and walks in.", "type": "action", "weight": 3 }, { "content": "In the middle of the screen, there is an older white man in a black coat standing next to the bed.", "type": "attribute", "weight": 3 }, { "content": "The older man holds a cane and a hat in his left hand.", "type": "attribute", "weight": 1 }, { "content": "In front of the older man on the left side of the screen is a young woman wearing a fur collar coat and a hat.", "type": "attribute", "weight": 3 }, { "content": "The older man pulls out a chair next to the bed for the woman to sit down.", "type": "action", "weight": 2 }, { "content": "There is a screen on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "Next to the screen is a small table.", "type": "scene", "weight": 1 }, { "content": "There is also a chair in the foreground on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the older man turns and walks toward the doctor on the right side of the screen. The doctor stands at the table and looks over the documents. The woman sits by the bed, leaning forward to comfort the patient.", "visual_elements": [ { "content": "Then, the older man turns and walks toward the doctor on the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The doctor stands at the table and looks over the documents.", "type": "action", "weight": 2 }, { "content": "The woman sits by the bed, leaning forward to comfort the patient.", "type": "action", "weight": 1 } ] }, { "event": "Finally, the camera switches to the back of the woman sitting. The patient is covered with a dark blanket. The woman's short hair is yellow and curly. The woman almost climbs onto the bed.", "visual_elements": [ { "content": "Finally, the camera switches to the back of the woman sitting.", "type": "camera", "weight": 3 }, { "content": "The patient is covered with a dark blanket.", "type": "attribute", "weight": 1 }, { "content": "The woman's short hair is yellow and curly.", "type": "attribute", "weight": 1 }, { "content": "The woman almost climbs onto the bed.", "type": "attribute", "weight": 3 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0235", "video_path": "VELOCITI/v_9Dd423YO56c_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, there are two characters in the scene. On the left stands a middle-aged man wearing a dark-colored coat and blue jeans, with his hands in his pockets. On the right sits another man, dressed in a black coat and brown pants, wearing a gray hat. He is seated on the snowy ground, looking at the man in blue jeans as he speaks.\nThen, the man wearing the hat supports his right leg with his right hand and adjusts the shoe on his right foot with his left hand. 
Meanwhile, the man in blue jeans takes a few steps, glances back, and then supports himself with his left hand, sitting down on the ground. Subsequently, the man in blue jeans lies on the ground, waving his hands above his head. At the same time, the man wearing the hat pushes himself up with his right hand and stands up. Afterward, the man wearing the hat pats his hips with both hands and walks towards the man in blue jeans. In the background, some trees can be seen, with bare branches and a thin layer of snow covering the ground. In the distance, a few street lamps cast a faint glow. The camera slightly shakes.", "events": [ { "event": "At the beginning of the video, there are two characters in the scene. On the left stands a middle-aged man wearing a dark-colored coat and blue jeans, with his hands in his pockets. On the right sits another man, dressed in a black coat and brown pants, wearing a gray hat. He is seated on the snowy ground, looking at the man in blue jeans as he speaks.", "visual_elements": [ { "content": "There are two characters in the scene.", "type": "scene", "weight": 3 }, { "content": "On the left stands a middle-aged man wearing a dark-colored coat and blue jeans, with his hands in his pockets.", "type": "attribute", "weight": 2 }, { "content": "On the right sits another man, dressed in a black coat and brown pants, wearing a gray hat.", "type": "attribute", "weight": 2 }, { "content": "The man wearing the hat is seated on the snowy ground, looking at the man in blue jeans as he speaks.", "type": "action", "weight": 3 } ] }, { "event": "Then, the man wearing the hat supports his right leg with his right hand and adjusts the shoe on his right foot with his left hand. Meanwhile, the man in blue jeans takes a few steps, glances back, and then supports himself with his left hand, sitting down on the ground. Subsequently, the man in blue jeans lies on the ground, waving his hands above his head. At the same time, the man wearing the hat pushes himself up with his right hand and stands up. Afterward, the man wearing the hat pats his hips with both hands and walks towards the man in blue jeans. In the background, some trees can be seen, with bare branches and a thin layer of snow covering the ground. In the distance, a few street lamps cast a faint glow. The camera slightly shakes.", "visual_elements": [ { "content": "The man wearing the hat supports his right leg with his right hand and adjusts the shoe on his right foot with his left hand.", "type": "action", "weight": 3 }, { "content": "The man in blue jeans takes a few steps.", "type": "action", "weight": 3 }, { "content": "The man in blue jeans glances back.", "type": "action", "weight": 3 }, { "content": "The man in blue jeans supports himself with his left hand, sitting down on the ground.", "type": "action", "weight": 3 }, { "content": "The man in blue jeans lies on the ground, waving his hands above his head.", "type": "action", "weight": 3 }, { "content": "The man wearing the hat pushes himself up with his right hand and stands up.", "type": "action", "weight": 3 }, { "content": "Afterward, the man wearing the hat pats his hips with both hands.", "type": "action", "weight": 3 }, { "content": "The man wearing the hat walks towards the man in blue jeans.", "type": "action", "weight": 3 }, { "content": "In the background, some trees can be seen, with bare branches and a thin layer of snow covering the ground. 
In the distance, a few street lamps cast a faint glow.", "type": "scene", "weight": 1 }, { "content": "The camera slightly shakes.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0236", "video_path": "VELOCITI/v_9FqZ2BMv_RU_seg_100_110.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the upper torso of a couple who are dancing. The woman wears a white sleeveless dress, and the man wears a dark-colored shirt. They hold hands, moving gracefully, with a luxuriously decorated room as the background. The walls are adorned with patterned wallpaper, and the curtains are dark red.\nAs the dance progresses, the camera follows the movements of the man. The woman's curly hair gently swings with her dance steps, and her face bears a smile. The man also has a smile on his face.\nNext, the camera cuts to a close-up of the man. His expression is intent, and he is thinking about something with his head bowed. The background is dim, with only some vague objects faintly visible. The man's body rocks rhythmically to the left and right.", "events": [ { "event": "At the beginning of the video, the camera focuses on the upper torso of a couple who are dancing. The woman wears a white sleeveless dress, and the man wears a dark-colored shirt. They hold hands, moving gracefully, with a luxuriously decorated room as the background. The walls are adorned with patterned wallpaper, and the curtains are dark red.", "visual_elements": [ { "content": "The camera focuses on the upper torso of a couple.", "type": "camera", "weight": 3 }, { "content": "They are dancing.", "type": "action", "weight": 3 }, { "content": "The woman wears a white sleeveless dress.", "type": "attribute", "weight": 2 }, { "content": "The man wears a dark-colored shirt.", "type": "attribute", "weight": 2 }, { "content": "They hold hands, moving gracefully.", "type": "action", "weight": 2 }, { "content": "The background is a luxuriously decorated room with patterned wallpaper on the walls and dark red curtains.", "type": "scene", "weight": 1 } ] }, { "event": "As the dance progresses, the camera follows the movements of the man. The woman's curly hair gently swings with her dance steps, and her face bears a smile. The man also has a smile on his face.", "visual_elements": [ { "content": "The camera follows the movements of the man.", "type": "camera", "weight": 3 }, { "content": "The woman's curly hair gently swings with her dance steps.", "type": "attribute", "weight": 1 }, { "content": "The woman has a smile on her face.", "type": "attribute", "weight": 2 }, { "content": "The man has a smile on his face.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera cuts to a close-up of the man. His expression is intent, and he is thinking about something with his head bowed. The background is dim, with only some vague objects faintly visible. 
The man's body rocks rhythmically to the left and right.", "visual_elements": [ { "content": "The camera cuts to a close-up of the man.", "type": "camera", "weight": 3 }, { "content": "The man's expression is intent, and he is thinking about something with his head bowed.", "type": "attribute", "weight": 3 }, { "content": "The background is dim, with only some vague objects faintly visible.", "type": "scene", "weight": 1 }, { "content": "The man's body rocks rhythmically to the left and right.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0237", "video_path": "VELOCITI/v_9LglzW3HFyg_seg_20_30.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a young man who wears a red long-sleeved shirt with a spider pattern, standing in a room. He speaks while looking at a man holding money. The background of the room is a frosted glass door with a vent above it. The camera shoots the man from the front, showing him standing in the middle of the room.\nNext, the camera cuts to a middle-aged man sitting behind a desk. He wears a brown coat and a black inner layer, holding a stack of money in his hand. The table is piled with money and documents, and the background wall is covered with various photos and posters. The middle-aged man looks at the young man and speaks. Behind the middle-aged man, there is another man sitting in a black suit.\nThe camera cuts back to the young man, who looks down and then slightly moves his body, turning his head to the right.", "events": [ { "event": "At the beginning of the video, the camera focuses on a young man who wears a red long-sleeved shirt with a spider pattern, standing in a room. He speaks while looking at a man holding money. The background of the room is a frosted glass door with a vent above it. The camera shoots the man from the front, showing him standing in the middle of the room.", "visual_elements": [ { "content": "The camera focuses on a young man.", "type": "camera", "weight": 3 }, { "content": "The young man wears a red long-sleeved shirt with a spider pattern, standing in a room.", "type": "attribute", "weight": 2 }, { "content": "The young man speaks while looking at a man holding money.", "type": "action", "weight": 2 }, { "content": "The background of the room is a frosted glass door with a vent above it.", "type": "scene", "weight": 1 }, { "content": "The camera shoots the man from the front, showing him standing in the middle of the room.", "type": "camera", "weight": 3 } ] }, { "event": "Next, the camera cuts to a middle-aged man sitting behind a desk. He wears a brown coat and a black inner layer, holding a stack of money in his hand. The table is piled with money and documents, and the background wall is covered with various photos and posters. The middle-aged man looks at the young man and speaks. 
Behind the middle-aged man, there is another man sitting in a black suit.", "visual_elements": [ { "content": "The camera cuts to a middle-aged man sitting behind a desk.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man wears a brown coat and a black inner layer, holding a stack of money in his hand.", "type": "attribute", "weight": 2 }, { "content": "The table is piled with money and documents, and the background wall is covered with various photos and posters.", "type": "scene", "weight": 1 }, { "content": "The middle-aged man looks at the young man and speaks.", "type": "action", "weight": 3 }, { "content": "Behind the middle-aged man, there is another man sitting in a black suit.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts back to the young man, who looks down and then slightly moves his body, turning his head to the right.", "visual_elements": [ { "content": "The camera cuts back to the young man.", "type": "camera", "weight": 3 }, { "content": "The young man looks down.", "type": "action", "weight": 3 }, { "content": "The young man slightly moves his body.", "type": "action", "weight": 3 }, { "content": "The young man turns his head to the right.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0238", "video_path": "VELOCITI/v_9LglzW3HFyg_seg_30_40.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a young man wearing a red shirt with a spider pattern, standing in a dimly lit room. In the background, blurred glass windows and some indistinct objects can be seen. The man turns his head and then turns around.\nNext, the camera cuts to the back view of the man as he is walking towards the door of the room. On a table inside the room, there is a pile of documents and papers, and next to the table, a man sits wearing a brown coat. The young man approaches the door, opens it, and leaves the room. Meanwhile, another man walks in, carrying a bag and dressed in a black coat and red shirt. The young man passes by a man wearing a leather jacket. The man in the leather jacket throws the bag in his hand towards the man seated at the table.\nThe camera cuts to the seated man, who speaks to the man in the leather jacket in front of him. The man seated behind him also turns to look. Suddenly, the camera cuts to a man in a leather jacket, holding a handgun pointed at the seated man at the table. The man holding the gun has a serious expression on his face and speaks while holding the gun.\nThe camera cuts back to the young man, who has already walked into a corridor. He approaches a door and presses the button on the left.", "events": [ { "event": "At the beginning of the video, the camera focuses on a young man wearing a red shirt with a spider pattern, standing in a dimly lit room. In the background, blurred glass windows and some indistinct objects can be seen. 
The man turns his head and then turns around.", "visual_elements": [ { "content": "The camera focuses on a young man.", "type": "camera", "weight": 3 }, { "content": "The young man wears a red shirt with a spider pattern, standing in a dimly lit room.", "type": "attribute", "weight": 2 }, { "content": "In the background, blurred glass windows and some indistinct objects can be seen.", "type": "scene", "weight": 1 }, { "content": "The young man turns his head and then turns around.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera cuts to the back view of the man as he is walking towards the door of the room. On a table inside the room, there is a pile of documents and papers, and next to the table, a man sits wearing a brown coat. The young man approaches the door, opens it, and leaves the room. Meanwhile, another man walks in, carrying a bag and dressed in a black coat and red shirt. The young man passes by a man wearing a leather jacket. The man in the leather jacket throws the bag in his hand towards the man seated at the table.", "visual_elements": [ { "content": "The camera cuts to the back view of the young man.", "type": "camera", "weight": 3 }, { "content": "The young man is walking towards the door of the room.", "type": "action", "weight": 2 }, { "content": "On a table inside the room, there is a pile of documents and papers.", "type": "scene", "weight": 1 }, { "content": "Next to the table, a man sits wearing a brown coat.", "type": "attribute", "weight": 1 }, { "content": "The young man approaches the door, opens it, and leaves the room.", "type": "action", "weight": 3 }, { "content": "Another man walks in.", "type": "action", "weight": 3 }, { "content": "The man carries a bag, wearing a black coat and a red shirt.", "type": "action", "weight": 2 }, { "content": "The young man passes by a man wearing a leather jacket.", "type": "action", "weight": 3 }, { "content": "The man in the leather jacket throws the bag in his hand towards the man seated at the table.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the seated man, who speaks to the man in the leather jacket in front of him. The man seated behind him also turns to look. Suddenly, the camera cuts to a man in a leather jacket, holding a handgun pointed at the seated man at the table. The man holding the gun has a serious expression on his face and speaks while holding the gun.", "visual_elements": [ { "content": "The camera cuts to the seated man.", "type": "camera", "weight": 3 }, { "content": "The seated man speaks to the man in the leather jacket in front of him.", "type": "action", "weight": 3 }, { "content": "The man seated behind also turns to look back.", "type": "action", "weight": 2 }, { "content": "The camera cuts to a man in a leather jacket.", "type": "camera", "weight": 3 }, { "content": "The man in the leather jacket holds a handgun, pointing it at the man seated at the table.", "type": "action", "weight": 3 }, { "content": "The man holding the gun has a serious expression on his face and speaks while holding the gun.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the young man, who has already walked into a corridor. 
He approaches a door and presses the button on the left.", "visual_elements": [ { "content": "The camera cuts back to the young man.", "type": "camera", "weight": 3 }, { "content": "The young man has already walked into a corridor.", "type": "action", "weight": 2 }, { "content": "The young man approaches a door.", "type": "action", "weight": 2 }, { "content": "The young man presses the button on the left.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 23 }, { "index": "TUNA_0239", "video_path": "VELOCITI/v_9VLcxXz-0w4_seg_75_85.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a middle-aged man wearing a gray sweater. He stands at the doorway of a room, with some family photos and a lamp visible in the background. The man in the gray sweater faces an older man standing in the kitchen, who wears a white shirt, a dark-colored sweater, and glasses.\nThe man in the gray sweater speaks with gestures, showing signs of excitement. The man in the white shirt holds a plate in his hand and bends down to speak with the man in the gray sweater. Next, the man in the gray sweater continues to emphasize his point with gestures, waving his arms more widely. The man in the white shirt turns and enters the kitchen. The man in the gray sweater lowers his hands and glances back. He looks into the kitchen again, raises both hands and waves them,speaking towards the inside. Subsequently, the man in the white shirt walks out of the kitchen with his head down, waves his right hand, and turns to leave.", "events": [ { "event": "At the beginning of the video, the camera focuses on a middle-aged man wearing a gray sweater. He stands at the doorway of a room, with some family photos and a lamp visible in the background. The man in the gray sweater faces an older man standing in the kitchen, who wears a white shirt, a dark-colored sweater, and glasses.", "visual_elements": [ { "content": "The camera focuses on a middle-aged man wearing a gray sweater.", "type": "camera", "weight": 3 }, { "content": "The man in the gray sweater stands at the doorway of a room.", "type": "attribute", "weight": 2 }, { "content": "In the background, some family photos and a lamp can be seen.", "type": "scene", "weight": 1 }, { "content": "The man in the gray sweater faces another older man standing in the kitchen.", "type": "attribute", "weight": 3 }, { "content": "The older man wears a white shirt and a dark-colored sweater, with glasses on.", "type": "attribute", "weight": 1 } ] }, { "event": "The man in the gray sweater speaks with gestures, showing signs of excitement. The man in the white shirt holds a plate in his hand and bends down to speak with the man in the gray sweater. Next, the man in the gray sweater continues to emphasize his point with gestures, waving his arms more widely. The man in the white shirt turns and enters the kitchen. The man in the gray sweater lowers his hands and glances back. He looks into the kitchen again, raises both hands and waves them,speaking towards the inside. 
Subsequently, the man in the white shirt walks out of the kitchen with his head down, waves his right hand, and turns to leave.", "visual_elements": [ { "content": "The man in the gray sweater speaks with gestures, showing signs of excitement.", "type": "action", "weight": 3 }, { "content": "The man in the white shirt holds a plate in his hand.", "type": "attribute", "weight": 1 }, { "content": "The man in the white shirt bends down to speak to the man in the gray sweater.", "type": "action", "weight": 3 }, { "content": "The man in the gray sweater continues to emphasize his point with gestures, waving his arms widely.", "type": "action", "weight": 3 }, { "content": "The man in the white shirt turns and enters the kitchen.", "type": "action", "weight": 3 }, { "content": "The man in the gray sweater lowers his hands and glances back.", "type": "action", "weight": 3 }, { "content": "The man in the gray sweater looks into the kitchen again.", "type": "action", "weight": 3 }, { "content": "The man in the gray sweater raises both hands and waves them.", "type": "action", "weight": 3 }, { "content": "The man in the gray sweater speaks towards the inside.", "type": "action", "weight": 3 }, { "content": "The man in the white shirt walks out of the kitchen with his head down.", "type": "action", "weight": 3 }, { "content": "The man in the white shirt waves his right hand.", "type": "action", "weight": 3 }, { "content": "The man in the white shirt turns and leaves.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0240", "video_path": "VELOCITI/v_9cb5Ka9SqGM_seg_140_150.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a highway with some trees and concrete walls in the background. A man wearing a white shirt and dark pants stands in the middle of the highway, with his arms spread wide as he crosses the road. In the background, multiple cars can be seen speeding past behind him, with various colors and models.\nThe camera cuts to a group of people standing beside the highway, filming. Several individuals are gathered around a camera, their expressions intense as they gaze intently at the camera's viewfinder. A man wearing a hat excitedly raises his right hand and speaks. The camera then cuts back to the highway, where the man runs across the road, with vehicles still speeding past in the background. A blonde woman wearing a yellow coat appears on the right side of the screen, turning her head to speak to the man.\nThe camera cuts back to the man, who has reached the bushes beside the highway. He turns around and excitedly moves his body.", "events": [ { "event": "At the beginning of the video, the camera focuses on a highway with some trees and concrete walls in the background. A man wearing a white shirt and dark pants stands in the middle of the highway, with his arms spread wide as he crosses the road. 
In the background, multiple cars can be seen speeding past behind him, with various colors and models.", "visual_elements": [ { "content": "The camera focuses on a highway.", "type": "camera", "weight": 3 }, { "content": "In the background, there are some trees and concrete walls.", "type": "scene", "weight": 2 }, { "content": "A man wearing a white shirt and dark pants stands in the middle of the highway.", "type": "attribute", "weight": 3 }, { "content": "The man stretches out his arms and crosses the road.", "type": "action", "weight": 3 }, { "content": "In the background, multiple cars can be seen speeding past him from behind, with various colors and models.", "type": "scene", "weight": 3 } ] }, { "event": "The camera cuts to a group of people standing beside the highway, filming. Several individuals are gathered around a camera, their expressions intense as they gaze intently at the camera's viewfinder. A man wearing a hat excitedly raises his right hand and speaks. The camera then cuts back to the highway, where the man runs across the road, with vehicles still speeding past in the background. A blonde woman wearing a yellow coat appears on the right side of the screen, turning her head to speak to the man.", "visual_elements": [ { "content": "The camera cuts to a group of people.", "type": "camera", "weight": 3 }, { "content": "They stand beside the highway, filming.", "type": "action", "weight": 2 }, { "content": "Several individuals are gathered around a camera.", "type": "attribute", "weight": 2 }, { "content": "Their expressions are intense as they gaze intently at the camera's viewfinder.", "type": "attribute", "weight": 2 }, { "content": "A man wearing a hat excitedly raises his right hand and speaks.", "type": "action", "weight": 3 }, { "content": "The camera cuts back to the highway.", "type": "camera", "weight": 3 }, { "content": "The man runs across the road.", "type": "scene", "weight": 3 }, { "content": "The vehicles in the background continue to speed past.", "type": "camera", "weight": 2 }, { "content": "A blonde woman wearing a yellow coat appears on the right side of the screen, turning her head to speak to the man.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts back to the man, who has reached the bushes beside the highway. He turns around and excitedly moves his body.", "visual_elements": [ { "content": "The camera cuts back to the man.", "type": "camera", "weight": 3 }, { "content": "The man has reached the bushes beside the highway.", "type": "action", "weight": 3 }, { "content": "The man turns around and excitedly moves his body.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0241", "video_path": "VELOCITI/v_9hoSF_oY8Jw_seg_60_70.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1070, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a massive monster, with parts of its body exposed on a stone platform. The monster's skin exhibits a rough texture, with a dull color that holds a certain gloss. Its claws are enormous, sharp in shape, and tightly gripping the edge of the platform. In the background, some architectural structures are visible, with light filtering down from above, casting a dim ambiance.\nThe camera slowly pans downwards, cutting to a young man dressed in a dark jacket over a dark T-shirt. 
The man leans backwards against the side of the stone platform, his arms stretched out, his eyes widened as he gazes upwards, breathing rapidly. The background wall is adorned with ancient relief carvings, depicting several human figures. The man's eyes are fixed on the front, observing every move of the monster.\nThe scene cuts again, revealing a spacious corridor lined with tall stone pillars on both sides and smooth stone flooring. The camera gradually zooms in, revealing two water dispensers at the end of the corridor and some decorations hanging on the walls.\nThe camera cuts back to the man, who stretches out his right hand towards the front, his eyes widened as he looks ahead, and then closes them.\nAt the end of the video, the camera cuts back to the corridor once again, with the monster's head gradually approaching. The monster's mouth opens wide, revealing sharp teeth.", "events": [ { "event": "At the beginning of the video, the camera focuses on a massive monster, with parts of its body exposed on a stone platform. The monster's skin exhibits a rough texture, with a dull color that holds a certain gloss. Its claws are enormous, sharp in shape, and tightly gripping the edge of the platform. In the background, some architectural structures are visible, with light filtering down from above, casting a dim ambiance.", "visual_elements": [ { "content": "The camera focuses on a massive monster.", "type": "camera", "weight": 3 }, { "content": "Parts of the monster's body are exposed on a stone platform.", "type": "attribute", "weight": 3 }, { "content": "The monster's skin exhibits a rough texture, with a dull color that holds a certain gloss.", "type": "attribute", "weight": 2 }, { "content": "The monster's claws are enormous, sharp in shape, and tightly gripping the edge of the platform.", "type": "action", "weight": 3 }, { "content": "In the background, some architectural structures are visible, with light filtering down from above, casting a dim ambiance.", "type": "scene", "weight": 1 } ] }, { "event": "The camera slowly pans downwards, cutting to a young man dressed in a dark jacket over a dark T-shirt. The man leans backwards against the side of the stone platform, his arms stretched out, his eyes widened as he gazes upwards, breathing rapidly. The background wall is adorned with ancient relief carvings, depicting several human figures. The man's eyes are fixed on the front, observing every move of the monster.", "visual_elements": [ { "content": "The camera slowly pans downwards.", "type": "camera", "weight": 3 }, { "content": "The camera cuts to a young man.", "type": "camera", "weight": 3 }, { "content": "The man is dressed in a dark jacket over a dark T-shirt.", "type": "attribute", "weight": 1 }, { "content": "The man leans backwards against the side of the stone platform, his arms stretched out.", "type": "action", "weight": 3 }, { "content": "The man gazes upwards with widened eyes.", "type": "action", "weight": 3 }, { "content": "The man breathes rapidly.", "type": "action", "weight": 3 }, { "content": "The background wall is adorned with ancient relief carvings, depicting several human figures.", "type": "scene", "weight": 1 }, { "content": "The man's eyes are fixed on the front, observing every move of the monster.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts again, revealing a spacious corridor lined with tall stone pillars on both sides and smooth stone flooring. 
The camera gradually zooms in, revealing two water dispensers at the end of the corridor and some decorations hanging on the walls.", "visual_elements": [ { "content": "The scene cuts again.", "type": "camera", "weight": 3 }, { "content": "A spacious corridor is revealed, lined with tall stone pillars on both sides and smooth stone flooring.", "type": "scene", "weight": 1 }, { "content": "The camera gradually zooms in.", "type": "camera", "weight": 3 }, { "content": "There are two water dispensers at the end of the corridor and some decorations hanging on the walls.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts back to the man, who stretches out his right hand towards the front, his eyes widened as he looks ahead, and then closes them.", "visual_elements": [ { "content": "The camera cuts back to the man.", "type": "camera", "weight": 3 }, { "content": "The man stretches out his right hand towards the front.", "type": "action", "weight": 3 }, { "content": "The man looks ahead with widened eyes.", "type": "action", "weight": 3 }, { "content": "The man then closes his eyes.", "type": "action", "weight": 3 } ] }, { "event": "At the end of the video, the camera cuts back to the corridor once again, with the monster's head gradually approaching. The monster's mouth opens wide, revealing sharp teeth.", "visual_elements": [ { "content": "The camera cuts back to the corridor once again.", "type": "camera", "weight": 3 }, { "content": "The monster's head gradually approaches.", "type": "action", "weight": 3 }, { "content": "The monster's mouth opens wide, revealing sharp teeth.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 24 }, { "index": "TUNA_0242", "video_path": "VELOCITI/v_A3xuABrdKis_seg_35_45.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the doorway of a freight elevator. Inside the elevator stands a tall figure wearing a black cloak and a skull mask. A man dressed in a suit is being held by this skull-masked figure. The suited man steps backward and turns his head to speak to someone behind him. They then turn to the right of the scene and exit the scene. In the background, some disorganized items can be seen, including a mattress leaning against the wall and some fabrics. A woman dressed in a purple suit walks towards the elevator.\nThe camera cuts to another suited man. The man in the brown tie raises his right hand to touch his face. He looks upwards and speaks. Subsequently, the man in the brown tie lowers his right hand and glances around.\nThe camera then cuts to a woman dressed in a purple suit, who looks downwards and speaks.\nThe camera cuts again, showing the man in the brown tie kneeling on the ground. He braces both hands against the elevator door that is about to close. and leans out to look to the left. In the background, the figure of the woman in the purple suit can be seen.\nFinally, the camera cuts back to a close-up shot of the woman in the purple suit, who looks downwards.", "events": [ { "event": "At the beginning of the video, the camera focuses on the doorway of a freight elevator. Inside the elevator stands a tall figure wearing a black cloak and a skull mask. A man dressed in a suit is being held by this skull-masked figure. The suited man steps backward and turns his head to speak to someone behind him. 
They then turn to the right of the scene and exit the scene. In the background, some disorganized items can be seen, including a mattress leaning against the wall and some fabrics. A woman dressed in a purple suit walks towards the elevator.", "visual_elements": [ { "content": "The camera focuses on the doorway of a freight elevator.", "type": "camera", "weight": 3 }, { "content": "Inside the elevator, a tall figure stands wearing a black cloak and a skull mask.", "type": "attribute", "weight": 3 }, { "content": "A man dressed in a suit is being held by the skull-masked figure.", "type": "attribute", "weight": 2 }, { "content": "The suited man steps backward and turns his head to speak to someone behind him.", "type": "action", "weight": 3 }, { "content": "They turn to the right and exit the scene.", "type": "action", "weight": 3 }, { "content": "In the background, some disorganized items can be seen, including a mattress leaning against the wall and some fabrics.", "type": "scene", "weight": 1 }, { "content": "A woman dressed in a purple suit walks towards the elevator.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to another suited man. The man in the brown tie raises his right hand to touch his face. He looks upwards and speaks. Subsequently, the man in the brown tie lowers his right hand and glances around.", "visual_elements": [ { "content": "The camera cuts to another suited man.", "type": "camera", "weight": 3 }, { "content": "The man wears a brown tie.", "type": "attribute", "weight": 2 }, { "content": "The man with the brown tie raises his right hand to touch his face.", "type": "action", "weight": 3 }, { "content": "The man with the brown tie looks upwards and speaks.", "type": "action", "weight": 3 }, { "content": "The man with the brown tie lowers his right hand and glances around.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to a woman dressed in a purple suit, who looks downwards and speaks.", "visual_elements": [ { "content": "The camera cuts to a woman dressed in a purple suit.", "type": "camera", "weight": 3 }, { "content": "The woman looks downwards and speaks.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again, showing the man in the brown tie kneeling on the ground. He braces both hands against the elevator door that is about to close. and leans out to look to the left. 
In the background, the figure of the woman in the purple suit can be seen.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "The man with the brown tie kneels on the ground.", "type": "attribute", "weight": 2 }, { "content": "The man with the brown tie braces both hands against the elevator door that is about to close.", "type": "action", "weight": 3 }, { "content": "The man with the brown tie leans out to look to the left.", "type": "action", "weight": 3 }, { "content": "In the background, the figure of the woman in the purple suit can be seen.", "type": "scene", "weight": 3 } ] }, { "event": "Finally, the camera cuts back to a close-up shot of the woman in the purple suit, who looks downwards.", "visual_elements": [ { "content": "The camera cuts back to a close-up shot of the woman in the purple suit.", "type": "camera", "weight": 3 }, { "content": "The woman looks downwards.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 21 }, { "index": "TUNA_0243", "video_path": "VELOCITI/v_A63kIf5CfgE_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a woman wearing a sweater with diamond patterns. The background is a dimly lit room with wooden walls. Her curly hair cascades over her shoulders. The woman has her eyes and mouth wide open, staring at the camera.\nNext, The camera cuts to a man wearing a blue checkered shirt, with a white cloth bag over his head. There is a small hole in the bag.\nThe camera cuts again, showing the woman with a cut on her thigh, revealing her skin underneath. Subsequently, the camera returns to her upper torso as she bends down and walks backwards. In the background, a blood-covered man can be seen, his upper torso bare, with a white cloth tied around his neck. When the woman bumps into the naked man, she glances back, then opens her mouth wide and her body shudders.\nThe camera cuts once more to the man in the blue checkered shirt, who stands up from the bed.\nFinally, the camera returns to the woman, who stands beside the blood-covered man, looking terrified. The scene gradually becomes blurred. A hand raises a knife in front of the camera.", "events": [ { "event": "At the beginning of the video, the camera focuses on a woman wearing a sweater with diamond patterns. The background is a dimly lit room with wooden walls. Her curly hair cascades over her shoulders. The woman has her eyes and mouth wide open, staring at the camera.", "visual_elements": [ { "content": "The camera focuses on a woman.", "type": "camera", "weight": 3 }, { "content": "The woman wears a sweater with diamond patterns.", "type": "attribute", "weight": 1 }, { "content": "The background is a dimly lit room with wooden walls.", "type": "scene", "weight": 1 }, { "content": "The woman's curly hair cascades over her shoulders.", "type": "attribute", "weight": 1 }, { "content": "The woman has her eyes and mouth wide open, staring at the camera.", "type": "attribute", "weight": 3 } ] }, { "event": "Next, The camera cuts to a man wearing a blue checkered shirt, with a white cloth bag over his head. 
There is a small hole in the bag.", "visual_elements": [ { "content": "The camera cuts to a man wearing a blue checkered shirt.", "type": "camera", "weight": 3 }, { "content": "The man in the blue checkered shirt has a white cloth bag over his head, with a small hole in it.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts again, showing the woman with a cut on her thigh, revealing her skin underneath. Subsequently, the camera returns to her upper torso as she bends down and walks backwards. In the background, a blood-covered man can be seen, his upper torso bare, with a white cloth tied around his neck. When the woman bumps into the naked man, she glances back, then opens her mouth wide and her body shudders.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "The woman has a cut on her thigh, revealing her skin underneath.", "type": "action", "weight": 3 }, { "content": "The camera returns to her upper torso.", "type": "camera", "weight": 3 }, { "content": "The woman bends down and walks backwards.", "type": "action", "weight": 3 }, { "content": "In the background, a blood-covered man can be seen.", "type": "scene", "weight": 3 }, { "content": "The blood-covered man's upper torso is bare, with a white cloth tied around his neck.", "type": "attribute", "weight": 3 }, { "content": "When the woman bumps into the naked man, she glances back.", "type": "action", "weight": 2 }, { "content": "The woman opens her mouth wide and her body shudders.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts once more to the man in the blue checkered shirt, who stands up from the bed.", "visual_elements": [ { "content": "The camera cuts once more to the man in the blue checkered shirt.", "type": "camera", "weight": 3 }, { "content": "The man in the blue checkered shirt stands up from the bed.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera returns to the woman, who stands beside the blood-covered man, looking terrified. The scene gradually becomes blurred. A hand raises a knife in front of the camera.", "visual_elements": [ { "content": "The camera returns to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman stands beside the blood-covered man, looking terrified.", "type": "attribute", "weight": 2 }, { "content": "The scene gradually becomes blurred.", "type": "camera", "weight": 3 }, { "content": "A hand raises a knife in front of the camera.", "type": "scene", "weight": 3 } ] } ], "n_events": 5, "n_elements": 21 }, { "index": "TUNA_0244", "video_path": "VELOCITI/v_ARUzoPWS4Uk_seg_60_70.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a dimly lit curtain, with a faint glimpse of firelight visible behind it. A man holding a torch appears in the scene. He wears dark clothing and a red hat. The man quickly passes through the curtain and runs towards the camera.\nThe camera cuts to a new scene, revealing an ancient underground tomb surrounded by cobwebs and dust. Various ancient artifacts and statues are placed inside the tomb. Shortly afterward, another man and a woman bend down to enter the tomb through the doorway. The man wears a white shirt and brown overalls, with noticeable stains and blood on his shirt. He holds a knife in his right hand and the woman's hand in his left. 
The woman wears a black sleeveless dress. The two quickly run into the tomb, followed by a man.\nThe camera cuts again, panning left and then backward, showcasing the details inside the tomb: golden statues, ancient treasure chests, and sarcophagi covered in cobwebs. The three people quickly move forward inside the room.", "events": [ { "event": "At the beginning of the video, the camera focuses on a dimly lit curtain, with a faint glimpse of firelight visible behind it. A man holding a torch appears in the scene. He wears dark clothing and a red hat. The man quickly passes through the curtain and runs towards the camera.", "visual_elements": [ { "content": "The camera focuses on a dimly lit curtain.", "type": "camera", "weight": 3 }, { "content": "A faint glimpse of firelight is visible behind the curtain.", "type": "scene", "weight": 1 }, { "content": "A man holding a torch appears in the scene.", "type": "scene", "weight": 3 }, { "content": "The man wears dark clothing and a red hat.", "type": "attribute", "weight": 1 }, { "content": "He quickly passes through the curtain and runs towards the camera.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a new scene, revealing an ancient underground tomb surrounded by cobwebs and dust. Various ancient artifacts and statues are placed inside the tomb. Shortly afterward, another man and a woman bend down to enter the tomb through the doorway. The man wears a white shirt and brown overalls, with noticeable stains and blood on his shirt. He holds a knife in his right hand and the woman's hand in his left. The woman wears a black sleeveless dress. The two quickly run into the tomb, followed by a man.", "visual_elements": [ { "content": "The camera cuts to a new scene.", "type": "camera", "weight": 3 }, { "content": "It's an ancient underground tomb surrounded by cobwebs and dust.", "type": "scene", "weight": 2 }, { "content": "Various ancient artifacts and statues are placed inside the tomb.", "type": "scene", "weight": 1 }, { "content": "Another man and a woman bend down to enter the tomb through the doorway.", "type": "action", "weight": 3 }, { "content": "The man wears a white shirt and brown overalls, with noticeable stains and blood on his shirt.", "type": "attribute", "weight": 1 }, { "content": "He holds a knife in his right hand and the woman's hand in his left.", "type": "attribute", "weight": 3 }, { "content": "The woman wears a black sleeveless dress.", "type": "attribute", "weight": 3 }, { "content": "The two quickly run into the tomb.", "type": "action", "weight": 1 }, { "content": "They are also followed by a man.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again, panning left and then backward, showcasing the details inside the tomb: golden statues, ancient treasure chests, and sarcophagi covered in cobwebs. 
The three people quickly move forward inside the room.", "visual_elements": [ { "content": "The camera cuts again, panning left and then backward.", "type": "camera", "weight": 3 }, { "content": "The scene showcases the details inside the tomb: golden statues, ancient treasure chests, and sarcophagi covered in cobwebs.", "type": "scene", "weight": 1 }, { "content": "The three people quickly move forward inside the room.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0245", "video_path": "VELOCITI/v_ARUzoPWS4Uk_seg_90_100.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a man dressed in light-colored clothing, who is lying on the ground with his right hand extended forward. The camera then cuts to another angle, where another man dressed in dark clothing can be seen lying on the other side of a stone door, his left hand reaching out to the man in light-colored clothing.\nThe camera then cuts to a closer angle, showing a close-up of the face of the man in light-colored clothing. Next, the camera cuts to the man in dark clothing, who is struggling to reach his hand through the stone door. In the background, some ancient walls and carvings can be seen. The camera then cuts back to the man in light-colored clothing. The stone door eventually closes, and the man in light-colored clothing withdraws his hand. He pushes himself up with his hands and knees and runs forward.\nThe camera cuts to the man in dark clothing, who is now lying with his hands on the stone door and gradually stands up. He then turns to his right and picks up a torch nearby. Following that, he turns around and looks around, raising his head.", "events": [ { "event": "At the beginning of the video, the camera focuses on a man dressed in light-colored clothing, who is lying on the ground with his right hand extended forward. The camera then cuts to another angle, where another man dressed in dark clothing can be seen lying on the other side of a stone door, his left hand reaching out to the man in light-colored clothing.", "visual_elements": [ { "content": "The camera focuses on a man dressed in light-colored clothing.", "type": "camera", "weight": 3 }, { "content": "The man in light-colored clothing lies on the ground with his right hand extended forward.", "type": "action", "weight": 3 }, { "content": "The camera then cuts to another angle.", "type": "camera", "weight": 3 }, { "content": "In the background, another man dressed in dark clothing can be seen lying on the other side of a stone door.", "type": "scene", "weight": 2 }, { "content": "The man in dark clothing has his left hand reaching out to the man in light-colored clothing.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to a closer angle, showing a close-up of the face of the man in light-colored clothing. Next, the camera cuts to the man in dark clothing, who is struggling to reach his hand through the stone door. In the background, some ancient walls and carvings can be seen. The camera then cuts back to the man in light-colored clothing. The stone door eventually closes, and the man in light-colored clothing withdraws his hand. 
He pushes himself up with his hands and knees and runs forward.", "visual_elements": [ { "content": "The camera then cuts to a closer angle, showing a close-up of the face of the man in light-colored clothing.", "type": "camera", "weight": 3 }, { "content": "The camera cuts to the man in dark clothing.", "type": "camera", "weight": 3 }, { "content": "The man in dark clothing is struggling to reach his hand through the stone door.", "type": "action", "weight": 2 }, { "content": "In the background, some ancient walls and carvings can be seen.", "type": "scene", "weight": 1 }, { "content": "The camera then cuts back to the man in light-colored clothing.", "type": "camera", "weight": 3 }, { "content": "The stone door eventually closes, and the man in light-colored clothing withdraws his hand.", "type": "action", "weight": 3 }, { "content": "The man in light-colored clothing pushes himself up with his hands and knees and runs forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the man in dark clothing, who is now lying with his hands on the stone door and gradually stands up. He then turns to his right and picks up a torch nearby. Following that, he turns around and looks around, raising his head.", "visual_elements": [ { "content": "The camera cuts to the man in dark clothing.", "type": "camera", "weight": 3 }, { "content": "The man in dark clothing is lying with his hands on the stone door and gradually stands up.", "type": "action", "weight": 3 }, { "content": "The man in dark clothing turns to his right and picks up a torch nearby.", "type": "action", "weight": 3 }, { "content": "The man in dark clothing turns around and looks around, raising his head.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0246", "video_path": "VELOCITI/v_AWi8x9ctlps_seg_50_60.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a black man wearing a light-colored shirt. The camera gradually pans up slightly, revealing the man's serious expression as he speaks to someone ahead. The scene cuts to another white man, wearing a similar light-colored shirt, gritting his teeth and staring ahead. The background features some trees.\nNext, the camera cuts to the two men standing on an open meadow, facing each other in confrontation. They start pushing each other, which soon escalates into a fierce fight. The camera captures their actions of punching and wrestling. During the fight, the camera constantly cuts angles to show different perspectives and detailed actions of the two persons.\nFinally, the camera zooms out, revealing the two persons continuing their fight on the meadow, with the trees and sky in the background clearly visible.", "events": [ { "event": "At the beginning of the video, the camera focuses on a black man wearing a light-colored shirt. The camera gradually pans up slightly, revealing the man's serious expression as he speaks to someone ahead. The scene cuts to another white man, wearing a similar light-colored shirt, gritting his teeth and staring ahead. 
The background features some trees.", "visual_elements": [ { "content": "The camera focuses on a black man wearing a light-colored shirt.", "type": "camera", "weight": 3 }, { "content": "The camera gradually pans up slightly.", "type": "camera", "weight": 3 }, { "content": "The man has a serious expression and is speaking to someone ahead.", "type": "action", "weight": 3 }, { "content": "The scene cuts to another white man.", "type": "camera", "weight": 3 }, { "content": "The white man wears a similar light-colored shirt.", "type": "attribute", "weight": 1 }, { "content": "The white man grits his teeth and stares ahead.", "type": "action", "weight": 3 }, { "content": "The background features some trees.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera cuts to the two men standing on an open meadow, facing each other in confrontation. They start pushing each other, which soon escalates into a fierce fight. The camera captures their actions of punching and wrestling. During the fight, the camera constantly cuts angles to show different perspectives and detailed actions of the two persons.", "visual_elements": [ { "content": "The camera cuts to the two men standing on an open meadow, facing each other in confrontation.", "type": "camera", "weight": 3 }, { "content": "The two persons start pushing each other and soon escalate into a fierce fight.", "type": "action", "weight": 3 }, { "content": "The camera captures their actions of punching and wrestling.", "type": "camera", "weight": 3 }, { "content": "The camera constantly cuts angles to show different perspectives and detailed actions of the two persons.", "type": "camera", "weight": 3 } ] }, { "event": "Finally, the camera zooms out, revealing the two persons continuing their fight on the meadow, with the trees and sky in the background clearly visible.", "visual_elements": [ { "content": "The camera zooms out.", "type": "camera", "weight": 3 }, { "content": "The two persons continue their fight on the meadow.", "type": "action", "weight": 3 }, { "content": "The trees and sky in the background are clearly visible.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0247", "video_path": "VELOCITI/v_AWi8x9ctlps_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a white man wearing a shirt, with a background of towering trees. The white man punches the black man facing him with his right hand, knocking him to the ground. Simultaneously, the camera pans to the black man on the ground, revealing some weeds and dirt.\nThe camera then pulls back, showing a wider view. The black man stands up, and the two begin to wrestle vigorously on the hillside. Subsequently, the camera zooms in, capturing the black man continuously hitting the white man in the abdomen. The camera then zooms out, showing the black man knocking the white man to the ground. Immediately after, the camera cuts to the white man. Finally, the camera cuts to the black man, who raises his left hand while turning to his right.", "events": [ { "event": "At the beginning of the video, the camera focuses on a white man wearing a shirt, with a background of towering trees. The white man punches the black man facing him with his right hand, knocking him to the ground. 
Simultaneously, the camera pans to the black man on the ground, revealing some weeds and dirt.", "visual_elements": [ { "content": "The camera focuses on a white man wearing a shirt.", "type": "camera", "weight": 3 }, { "content": "The background features tall trees.", "type": "scene", "weight": 1 }, { "content": "The white man punches the black man facing him with his right hand, knocking him to the ground.", "type": "action", "weight": 2 }, { "content": "The camera pans to the black man on the ground, with some weeds and dirt visible.", "type": "camera", "weight": 3 } ] }, { "event": "The camera then pulls back, showing a wider view. The black man stands up, and the two begin to wrestle vigorously on the hillside. Subsequently, the camera zooms in, capturing the black man continuously hitting the white man in the abdomen. The camera then zooms out, showing the black man knocking the white man to the ground. Immediately after, the camera cuts to the white man. Finally, the camera cuts to the black man, who raises his left hand while turning to his right.", "visual_elements": [ { "content": "The camera zooms out.", "type": "camera", "weight": 3 }, { "content": "The black man stands up.", "type": "action", "weight": 3 }, { "content": "The two begin to wrestle vigorously on the hillside.", "type": "action", "weight": 3 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The black man continuously hits the white man in the abdomen.", "type": "action", "weight": 3 }, { "content": "The camera zooms out.", "type": "camera", "weight": 3 }, { "content": "The black man knocks the white man to the ground.", "type": "action", "weight": 3 }, { "content": "The camera cuts to the white man.", "type": "camera", "weight": 3 }, { "content": "The camera cuts to the black man.", "type": "camera", "weight": 3 }, { "content": "The black man raises his left hand while turning to his right.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0248", "video_path": "VELOCITI/v_Ar-hnj5Zsk4_seg_85_95.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a man dressed in a suit and a woman clad in a striped sweater and a long skirt appear on the screen. The man sits with his legs crossed on a wooden bench, against a background of grassland and trees. The woman, holding a hat in her hand, walks forward while talking. Subsequently, she glances back at the man and continues walking, then leans to her left to look.\nThe camera then cuts to a vintage car traveling along a country road. The car approaches from a distance, gradually getting closer to the camera, with the trees and guardrails alongside the road illuminated by its headlights.\nThe camera cuts back to the woman, who stops in her tracks, bends down to hike up her skirt, revealing her left leg. Following this, the camera focuses in close-up on the man, who stands up.", "events": [ { "event": "At the beginning of the video, a man dressed in a suit and a woman clad in a striped sweater and a long skirt appear on the screen. The man sits with his legs crossed on a wooden bench, against a background of grassland and trees. The woman, holding a hat in her hand, walks forward while talking. 
Subsequently, she glances back at the man and continues walking, then leans to her left to look.", "visual_elements": [ { "content": "A man dressed in a suit and a woman in a striped sweater and long skirt appear on the screen.", "type": "scene", "weight": 3 }, { "content": "The man sits with his legs crossed on a wooden bench.", "type": "attribute", "weight": 2 }, { "content": "The background is a landscape of grassland and trees.", "type": "scene", "weight": 1 }, { "content": "The woman walks forward while talking.", "type": "action", "weight": 3 }, { "content": "The woman holds a hat in her hand.", "type": "action", "weight": 3 }, { "content": "The woman glances back at the man.", "type": "action", "weight": 3 }, { "content": "The woman continues walking.", "type": "action", "weight": 3 }, { "content": "The woman leans to her left to look.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to a vintage car traveling along a country road. The car approaches from a distance, gradually getting closer to the camera, with the trees and guardrails alongside the road illuminated by its headlights.", "visual_elements": [ { "content": "The camera cuts to a vintage car traveling along a country road.", "type": "camera", "weight": 3 }, { "content": "The car approaches from a distance, gradually getting closer to the camera.", "type": "action", "weight": 3 }, { "content": "The trees and guardrails alongside the road are illuminated by its headlights.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts back to the woman, who stops in her tracks, bends down to hike up her skirt, revealing her left leg. Following this, the camera focuses in close-up on the man, who stands up.", "visual_elements": [ { "content": "The camera cuts back to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman stops in her tracks.", "type": "action", "weight": 3 }, { "content": "The woman bends down to hike up her skirt, revealing her left leg.", "type": "action", "weight": 3 }, { "content": "The camera focuses in close-up on the man.", "type": "camera", "weight": 3 }, { "content": "The man stands up.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0249", "video_path": "VELOCITI/v_Auq9e3lBq6I_seg_65_75.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a woman with dark curly hair, wearing a black sleeveless top and a red necklace around her neck. The background is blurred, with some vague lighting and the outlines of buildings. She smiles and speaks to someone in front of her.\nNext, the camera cuts to a man dressed in a Santa Claus costume, holding a cigarette in front of him. The background is a green field with some trees.\nThe camera cuts back to the woman again. She smiles and then turns to leave. Subsequently, the camera cuts back to the man, who is leaning against a car with his cigarette smoking. He looks ahead and chews something in his mouth. Suddenly, a man in a white short-sleeved shirt rushes to the man in the Santa costume from the right side of the screen and pushes him with his right hand. 
The man in the Santa costume turns his head to look at the man in the white short-sleeved shirt.", "events": [ { "event": "At the beginning of the video, the camera focuses on a woman with dark curly hair, wearing a black sleeveless top and a red necklace around her neck. The background is blurred, with some vague lighting and the outlines of buildings. She smiles and speaks to someone in front of her.", "visual_elements": [ { "content": "The camera focuses on a woman.", "type": "camera", "weight": 3 }, { "content": "The woman has dark curly hair, wearing a black sleeveless top and a red necklace around her neck.", "type": "attribute", "weight": 1 }, { "content": "The background is blurred, with some vague lighting and the outlines of buildings.", "type": "scene", "weight": 1 }, { "content": "The woman smiles and speaks to someone in front of her.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to a man dressed in a Santa Claus costume, holding a cigarette in front of him. The background is a green field with some trees.", "visual_elements": [ { "content": "The camera cuts to a man.", "type": "camera", "weight": 3 }, { "content": "The man is dressed in a Santa Claus costume.", "type": "attribute", "weight": 2 }, { "content": "The man holds a cigarette in front of him.", "type": "action", "weight": 3 }, { "content": "The background is a green field with some trees.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts back to the woman again. She smiles and then turns to leave. Subsequently, the camera cuts back to the man, who is leaning against a car with his cigarette smoking. He looks ahead and chews something in his mouth. Suddenly, a man in a white short-sleeved shirt rushes to the man in the Santa costume from the right side of the screen and pushes him with his right hand. The man in the Santa costume turns his head to look at the man in the white short-sleeved shirt.", "visual_elements": [ { "content": "The camera cuts back to the woman again.", "type": "camera", "weight": 3 }, { "content": "The woman smiles and then turns to leave.", "type": "action", "weight": 3 }, { "content": "The camera cuts back to the man.", "type": "camera", "weight": 3 }, { "content": "The man leans against a car, with his cigarette emitting smoke.", "type": "attribute", "weight": 2 }, { "content": "The man gazes ahead.", "type": "attribute", "weight": 1 }, { "content": "The man chews something in his mouth.", "type": "action", "weight": 2 }, { "content": "A man in a white short-sleeved shirt rushes to the man in the Santa costume from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The man in the white short-sleeved shirt pushes the man in the Santa costume with his right hand.", "type": "action", "weight": 3 }, { "content": "The man in the Santa costume turns his head to face the man in the white short-sleeved shirt.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0250", "video_path": "VELOCITI/v_B2zzhcU9f9U_seg_30_40.mp4", "video_source": "VELOCITI", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the upper torso of a woman wearing a dark coat. The background is blurred, with glimpses of indoor furniture and decorations visible. 
The camera then pans down to her hands as she operates a mobile phone, holding it with both hands and swiping her fingers across the screen.\nThe camera cuts back to her upper torso again, where she looks up and ahead before moving to the left of the screen. Subsequently, the camera pans to another woman with blonde hair and a man, standing in front of a wall decorated with floral patterns. The blonde woman is holding a baby and then bends down to put the baby down. The man stands behind her, bending down alongside the blonde woman. The first woman in the background turns and leaves.\nThe camera cuts back to the first woman again as she approaches a window. Light from outside shines through the window, illuminating her side face. She holds the phone to her right ear with her right hand, making a call.", "events": [ { "event": "At the beginning of the video, the camera focuses on the upper torso of a woman wearing a dark coat. The background is blurred, with glimpses of indoor furniture and decorations visible. The camera then pans down to her hands as she operates a mobile phone, holding it with both hands and swiping her fingers across the screen.", "visual_elements": [ { "content": "The camera focuses on the upper torso of a woman.", "type": "camera", "weight": 3 }, { "content": "The woman wears a dark coat.", "type": "attribute", "weight": 1 }, { "content": "The background is blurred, with indoor furniture and decorations faintly visible.", "type": "scene", "weight": 1 }, { "content": "The camera pans down to her hands.", "type": "camera", "weight": 3 }, { "content": "The woman is operating a mobile phone.", "type": "action", "weight": 3 }, { "content": "The woman holds the phone with both hands, swiping her fingers across the screen.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to her upper torso again, where she looks up and ahead before moving to the left of the screen. Subsequently, the camera pans to another woman with blonde hair and a man, standing in front of a wall decorated with floral patterns. The blonde woman is holding a baby and then bends down to put the baby down. The man stands behind her, bending down alongside the blonde woman. The first woman in the background turns and leaves.", "visual_elements": [ { "content": "The camera cuts back to her upper torso.", "type": "camera", "weight": 3 }, { "content": "The woman looks up and ahead.", "type": "action", "weight": 2 }, { "content": "The woman moves to the left of the screen.", "type": "action", "weight": 3 }, { "content": "The camera pans to another woman with blonde hair and a man.", "type": "camera", "weight": 3 }, { "content": "They stand in front of a wall decorated with floral patterns.", "type": "scene", "weight": 1 }, { "content": "The blonde woman holds a baby.", "type": "attribute", "weight": 3 }, { "content": "The blonde woman bends down to put the baby down.", "type": "action", "weight": 3 }, { "content": "The man stands behind her, bending down with the blonde woman.", "type": "action", "weight": 2 }, { "content": "The first woman in the background turns and walks away.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts back to the first woman again as she approaches a window. Light from outside shines through the window, illuminating her side face. 
She holds the phone to her right ear with her right hand, making a call.", "visual_elements": [ { "content": "The camera cuts back to the first woman.", "type": "camera", "weight": 3 }, { "content": "The woman approaches a window.", "type": "action", "weight": 2 }, { "content": "Light from outside shines through the window, illuminating her side face.", "type": "scene", "weight": 1 }, { "content": "The woman holds the phone to her right ear, making a call.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0251", "video_path": "VELOCITI/v_B8cWjLMuJgo_seg_80_90.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a famous bridge with a blue sky and distant mountains in the background. A male wearing dark clothing and a hat stands on the bank below the bridge. The male's figure is on the right side of the frame and he faces the bridge and the water with his back to the camera.\nThe scene switches to the male walking down the steps, where the male quickly removes his jacket and flings it to the side, revealing a white shirt and dark pants underneath, before quickly jumping into the water with a huge splash.\nThe camera switches to the water and shows a female dressed in black floating on her back in the water. The camera switches again and shows the male swimming in the water; he is approaching another person floating in the water.", "events": [ { "event": "The video begins with the camera focusing on a famous bridge with a blue sky and distant mountains in the background. A male wearing dark clothing and a hat stands on the bank below the bridge. The male's figure is on the right side of the frame and he faces the bridge and the water with his back to the camera.", "visual_elements": [ { "content": "The camera focuses on a famous bridge.", "type": "camera", "weight": 3 }, { "content": "A blue sky and distant mountains are in the background.", "type": "scene", "weight": 2 }, { "content": "A male wearing dark clothing and a hat stands on the bank below the bridge.", "type": "scene", "weight": 3 }, { "content": "The male's figure is on the right side of the frame.", "type": "attribute", "weight": 2 }, { "content": "The male faces the bridge and the water with his back to the camera.", "type": "action", "weight": 3 } ] }, { "event": "The scene switches to the male walking down the steps, where the male quickly removes his jacket and flings it to the side, revealing a white shirt and dark pants underneath, before quickly jumping into the water with a huge splash.", "visual_elements": [ { "content": "The scene switches to the male walking down the steps.", "type": "camera", "weight": 3 }, { "content": "The male quickly removes his jacket and flings it to the side, revealing a white shirt and dark pants underneath.", "type": "action", "weight": 3 }, { "content": "The male quickly jumps into the water with a huge splash.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the water and shows a female dressed in black floating on her back in the water. 
The camera switches again and shows the male swimming in the water; he is approaching another person floating in the water.", "visual_elements": [ { "content": "The camera switches to the water and shows a female dressed in black floating on her back in the water.", "type": "camera", "weight": 3 }, { "content": "The camera switches again and shows the male swimming in the water.", "type": "camera", "weight": 3 }, { "content": "The male is approaching another person floating in the water.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0252", "video_path": "VELOCITI/v_BAqabMWnAmk_seg_100_110.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with images of three males who are standing in front of a brick wall holding weapons. The man on the left is wearing a dark jacket and hat, the man in the middle is wearing a black jacket with red and yellow stripes, and the man on the right is wearing a dark hoodie and a long chain around his neck. Posters are plastered all over the dilapidated back wall.\nThe camera switches to an outdoor scene where a man in a red jacket is crouched next to a car looking around; behind him there is a gray wall and some debris on the ground. The man then stands up, and the woman in the orange blouse and the man in the black and white blouse next to him battle behind the car, all three looking forward.\nThe camera switches again to show a man lying on the floor, dressed in dark clothing and surrounded by smoke. The camera switches to the man on the right side of the chamber, who gradually grits his teeth and walks forward.", "events": [ { "event": "The video begins with images of three males who are standing in front of a brick wall holding weapons. The man on the left is wearing a dark jacket and hat, the man in the middle is wearing a black jacket with red and yellow stripes, and the man on the right is wearing a dark hoodie and a long chain around his neck. Posters are plastered all over the dilapidated back wall.", "visual_elements": [ { "content": "The video begins with images of three males who are standing in front of a brick wall holding weapons.", "type": "attribute", "weight": 3 }, { "content": "The man on the left is wearing a dark jacket and hat.", "type": "attribute", "weight": 2 }, { "content": "The man on the right is wearing a dark hoodie.", "type": "attribute", "weight": 2 }, { "content": "The man on the right is wearing a long chain around his neck.", "type": "attribute", "weight": 2 }, { "content": "Posters are plastered all over the dilapidated back wall.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to an outdoor scene where a man in a red jacket is crouched next to a car looking around; behind him there is a gray wall and some debris on the ground. 
The man then stands up, and the woman in the orange blouse and the man in the black and white blouse next to him battle behind the car, all three looking forward.", "visual_elements": [ { "content": "The camera switches to an outdoor scene where a man in a red jacket is crouched next to a car looking around", "type": "camera", "weight": 3 }, { "content": "Behind the man there is a gray wall and some debris on the ground.", "type": "attribute", "weight": 2 }, { "content": "The man then stands up.", "type": "action", "weight": 3 }, { "content": "The woman in the orange blouse and the man in the black and white blouse next to him battle behind the car, all three looking forward.", "type": "action", "weight": 2 }, { "content": "All three looking forward.", "type": "action", "weight": 1 } ] }, { "event": "The camera switches again to show a man lying on the floor, dressed in dark clothing and surrounded by smoke. The camera switches to the man on the right side of the chamber, who gradually grits his teeth and walks forward.", "visual_elements": [ { "content": "The camera switches again to show a man lying on the floor, dressed in dark clothing.", "type": "camera", "weight": 3 }, { "content": "The man lying on the floor dressed in dark clothing.", "type": "attribute", "weight": 2 }, { "content": "The scene is surrounded by smoke.", "type": "scene", "weight": 2 }, { "content": "The camera switches to the man on the right side of the chamber.", "type": "camera", "weight": 3 }, { "content": "The man gradually grits his teeth and walks forward.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0253", "video_path": "VELOCITI/v_BkvVBZwXVhg_seg_135_145.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera moves to the left and gradually closes in on a man in a firefighting uniform lying on a bridge, the man has his hands on the railings on both sides. Two fully dressed firefighters enter the frame on the left side of the screen, the front firefighter jumps over the lying man holding onto the railing, the back firefighter kneels in front of the man and gives his fire hat and oxygen mask to the man lying on the bridge. In the background is a roaring fire, and a crumbling metal corridor bridge.\nThe camera switches frames to focus on the man lying on the ground, and the image shows the man lying on the ground sitting up with the help of two firefighters, the man looking tearfully at the fire.", "events": [ { "event": "The camera moves to the left and gradually closes in on a man in a firefighting uniform lying on a bridge, the man has his hands on the railings on both sides. Two fully dressed firefighters enter the frame on the left side of the screen, the front firefighter jumps over the lying man holding onto the railing, the back firefighter kneels in front of the man and gives his fire hat and oxygen mask to the man lying on the bridge. 
In the background is a roaring fire, and a crumbling metal corridor bridge.", "visual_elements": [ { "content": "The camera moves to the left and gradually closes in on a man in a firefighting uniform lying on a bridge.", "type": "camera", "weight": 3 }, { "content": "The man has his hands on the railings on both sides.", "type": "action", "weight": 2 }, { "content": "Two fully dressed firefighters enter the frame on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "The front firefighter jumps over the lying man holding onto the railing", "type": "action", "weight": 2 }, { "content": "The back firefighter kneels in front of the man.", "type": "action", "weight": 2 }, { "content": "The back firefighter gives his fire hat and oxygen mask to the man lying on the bridge.", "type": "action", "weight": 2 }, { "content": "In the background is a roaring fire and a crumbling metal corridor bridge.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches frames to focus on the man lying on the ground, and the image shows the man lying on the ground sitting up with the help of two firefighters, the man looking tearfully at the fire.", "visual_elements": [ { "content": "The camera switches frames to focus on the man lying on the ground,", "type": "camera", "weight": 3 }, { "content": "The image shows the man lying on the ground sitting up with the help of two firefighters.", "type": "action", "weight": 3 }, { "content": "The man is looking tearfully at the fire.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0254", "video_path": "VELOCITI/v_BkvVBZwXVhg_seg_35_45.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a male in a fire suit lying on a metal porch bridge with flames and metal structures in the background. The man is sweating profusely as he raises his head to patrol his surroundings, then the man looks ahead.\nThe camera switches frames to show another man in a fireman's uniform, he is sweating profusely and he is anxiously talking to the man lying on the ground. The camera switches to the man lying on the ground as he looks at the other firefighter in front of him. The camera switches to show the man lying on the ground pulling his hands back from the other firefighter and saluting the other firefighter, who faces the man lying on the ground with both hands on the sides of the bridge, then gets up and prepares to jump off the side of the bridge.", "events": [ { "event": "The video begins with the camera focusing on a male in a fire suit lying on a metal porch bridge with flames and metal structures in the background. 
The man is sweating profusely as he raises his head to patrol his surroundings, then the man looks ahead.", "visual_elements": [ { "content": "The video begins with the camera focusing on a male in a fire suit lying on a metal porch bridge.", "type": "camera", "weight": 3 }, { "content": "Flames and metal structures in the background", "type": "scene", "weight": 2 }, { "content": "The man is sweating profusely.", "type": "attribute", "weight": 1 }, { "content": "The man raises his head to patrol his surroundings, then the man looks ahead.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches frames to show another man in a fireman's uniform, he is sweating profusely and he is anxiously talking to the man lying on the ground. The camera switches to the man lying on the ground as he looks at the other firefighter in front of him. The camera switches to show the man lying on the ground pulling his hands back from the other firefighter and saluting the other firefighter, who faces the man lying on the ground with both hands on the sides of the bridge, then gets up and prepares to jump off the side of the bridge.", "visual_elements": [ { "content": "The camera switches frames to show another man in a fireman's uniform.", "type": "scene", "weight": 2 }, { "content": "The man is sweating profusely.", "type": "attribute", "weight": 1 }, { "content": "The man is anxiously talking to the man lying on the ground.", "type": "action", "weight": 3 }, { "content": "The camera switches to the man lying on the ground.", "type": "camera", "weight": 3 }, { "content": "The man looks at the other firefighter in front of him.", "type": "action", "weight": 3 }, { "content": "The camera switches to show the man lying on the ground pulling his hands back from the other firefighter", "type": "camera", "weight": 3 }, { "content": "The man lying on the ground salutes the other firefighter,", "type": "action", "weight": 2 }, { "content": "The other firefighter, who faces the man lying on the ground with both hands on the sides of the bridge,", "type": "action", "weight": 2 }, { "content": "The other firefighter gets up and prepares to jump off the side of the bridge.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0255", "video_path": "VELOCITI/v_BkvVBZwXVhg_seg_5_15.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with two firefighters on a metal corridor bridge in the frame. The firefighter on the left is crouched on the bridge with his hands raised and calling out to the front and pointing his finger to the firefighter lying on the bridge on the right side of the frame, as a roaring fire and metal structure can be seen in the background.\nThe camera switches to an industrial scene that is burning, with two fully equipped firefighters spraying water with hoses and advancing through various obstacles.\nThe camera switches to the crouching firefighter on the stairs, the crouching firefighter looks eagerly into the distance, then the crouching firefighter turns his head to interact with the firefighter lying on the ground. Finally, the camera switches to the face of the firefighter lying on the floor, the expression of the firefighter lying on the floor looks very seriously to his right.", "events": [ { "event": "The video begins with two firefighters on a metal corridor bridge in the frame. 
The firefighter on the left is crouched on the bridge with his hands raised and calling out to the front and pointing his finger to the firefighter lying on the bridge on the right side of the frame, as a roaring fire and metal structure can be seen in the background.", "visual_elements": [ { "content": "The video begins with two firefighters on a metal corridor bridge in the frame.", "type": "scene", "weight": 3 }, { "content": "The firefighter on the left is crouched on the bridge with his hands raised and calling out to the front.", "type": "action", "weight": 3 }, { "content": "The firefighter on the left is pointing his finger to the firefighter lying on the bridge on the right side of the frame.", "type": "action", "weight": 3 }, { "content": "A roaring fire and metal structure can be seen in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to an industrial scene that is burning, with two fully equipped firefighters spraying water with hoses and advancing through various obstacles.", "visual_elements": [ { "content": "The camera switches to an industrial scene that is burning.", "type": "camera", "weight": 3 }, { "content": "two fully equipped firefighters spraying water with hoses and advancing through various obstacles", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the crouching firefighter on the stairs, the crouching firefighter looks eagerly into the distance, then the crouching firefighter turns his head to interact with the firefighter lying on the ground. Finally, the camera switches to the face of the firefighter lying on the floor, the expression of the firefighter lying on the floor looks very seriously to his right.", "visual_elements": [ { "content": "The camera switches to the crouching firefighter on the stairs", "type": "camera", "weight": 3 }, { "content": "The crouching firefighter looks eagerly into the distance.", "type": "action", "weight": 2 }, { "content": "The crouching firefighter turns his head to interact with the firefighter lying on the ground.", "type": "action", "weight": 2 }, { "content": "The camera switches to the face of the firefighter lying on the floor.", "type": "camera", "weight": 3 }, { "content": "The expression of the firefighter lying on the floor looks very seriously to his right.", "type": "scene", "weight": 3 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0256", "video_path": "VELOCITI/v_ByNjiy8-j8g_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a hallway flanked by marble walls and elevator doors. A young man in a dark jacket and gray pants walks from the end of the hallway, his hands removing the hood of the red sweatshirt he wears underneath, leaking out a black knit cap. A glass door can be seen at the end of the hallway, behind which a man in dark clothing is cleaning the floor.\nAs the man approaches and the camera follows his movements, the man continues to walk forward and the man pulls open a glass with an orange neon sign “W” and enters a new room. The room is decorated in a more modern style, with an abstract painting on the wall, a long sofa and several floor lamps. The camera continues to follow the movements of the man in the hat as he walks towards the reception desk in the center of the room. 
Behind the reception desk sits a black woman wearing a red leather jacket and silver earrings as the man in the hat and the black receptionist woman engage in conversation.", "events": [ { "event": "The video begins with the camera focusing on a hallway flanked by marble walls and elevator doors. A young man in a dark jacket and gray pants walks from the end of the hallway, his hands removing the hood of the red sweatshirt he wears underneath, leaking out a black knit cap. A glass door can be seen at the end of the hallway, behind which a man in dark clothing is cleaning the floor.", "visual_elements": [ { "content": "The video begins with the camera focusing on a hallway flanked by marble walls and elevator doors.", "type": "camera", "weight": 3 }, { "content": "A young man in a dark jacket and gray pants walks from the end of the hallway.", "type": "action", "weight": 3 }, { "content": "The man's hands remove the hood of the red sweatshirt he wears underneath, revealing a black knit cap.", "type": "action", "weight": 2 }, { "content": "A glass door can be seen at the end of the hallway.", "type": "scene", "weight": 2 }, { "content": "Behind the door a man in dark clothing is cleaning the floor.", "type": "attribute", "weight": 1 } ] }, { "event": "As the man approaches and the camera follows his movements, the man continues to walk forward and the man pulls open a glass with an orange neon sign “W” and enters a new room. The room is decorated in a more modern style, with an abstract painting on the wall, a long sofa and several floor lamps. The camera continues to follow the movements of the man in the hat as he walks towards the reception desk in the center of the room. Behind the reception desk sits a black woman wearing a red leather jacket and silver earrings as the man in the hat and the black receptionist woman engage in conversation.", "visual_elements": [ { "content": "As the man approaches and the camera follows his movements.", "type": "camera", "weight": 3 }, { "content": "The man continues to walk forward.", "type": "action", "weight": 2 }, { "content": "The man pulls open a glass door with an orange neon sign “W” and enters a new room.", "type": "action", "weight": 3 }, { "content": "The room is decorated in a more modern style.", "type": "scene", "weight": 2 }, { "content": "An abstract painting is on the wall.", "type": "scene", "weight": 2 }, { "content": "A long sofa and several floor lamps in the room.", "type": "scene", "weight": 2 }, { "content": "The camera continues to follow the movements of the man.", "type": "camera", "weight": 3 }, { "content": "The man in the hat walks towards the reception desk in the center of the room.", "type": "action", "weight": 3 }, { "content": "Behind the reception desk sits a black woman wearing a red leather jacket and silver earrings.", "type": "attribute", "weight": 2 }, { "content": "The man in the hat and the black receptionist woman engage in conversation.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0257", "video_path": "VELOCITI/v_ByNjiy8-j8g_seg_45_55.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a young man in a dark jacket and hat who is in a narrow corridor as he opens the door in front of him. 
The camera switches to the man entering a room, which is dimly lit with a number of recording devices and cluttered desktops, the man suddenly picks up the pace as he rushes towards a man sitting at a console, the two men quickly wrestle as the blonde woman sitting on the device tries to stop them, the camera moves with the man's movements.\nThe camera switches to the man in the hat pushing the other man hard onto the table, the other man tries to fight back but is overpowered by the man, the man throws the other man to the floor, the room is in a mess, tabletops scattered with items.\nFinally, the camera switches to three men sitting in front of a microphone who are laughing.", "events": [ { "event": "The video begins with the camera focusing on a young man in a dark jacket and hat who is in a narrow corridor as he opens the door in front of him. The camera switches to the man entering a room, which is dimly lit with a number of recording devices and cluttered desktops, the man suddenly picks up the pace as he rushes towards a man sitting at a console, the two men quickly wrestle as the blonde woman sitting on the device tries to stop them, the camera moves with the man's movements.", "visual_elements": [ { "content": "The video begins with the camera focusing on a young man in a dark jacket and hat.", "type": "camera", "weight": 3 }, { "content": "The man is in a narrow corridor.", "type": "action", "weight": 2 }, { "content": "The man opens the door in front of him.", "type": "action", "weight": 3 }, { "content": "The camera switches to the man entering a room", "type": "camera", "weight": 3 }, { "content": "The room is dimly lit.", "type": "scene", "weight": 2 }, { "content": "The room has a number of recording devices and cluttered desktops.", "type": "scene", "weight": 2 }, { "content": "The man suddenly picks up the pace.", "type": "action", "weight": 2 }, { "content": "The man rushes towards a man sitting at a console.", "type": "action", "weight": 3 }, { "content": "The two men quickly wrestle.", "type": "action", "weight": 2 }, { "content": "The blonde woman sitting on the device tries to stop them.", "type": "action", "weight": 2 }, { "content": "The camera moves with the man's movements.", "type": "camera", "weight": 3 } ] }, { "event": "The camera switches to the man in the hat pushing the other man hard onto the table, the other man tries to fight back but is overpowered by the man, the man throws the other man to the floor, the room is in a mess, tabletops scattered with items.", "visual_elements": [ { "content": "The camera switches to the man in the hat pushing the other man hard onto the table.", "type": "camera", "weight": 3 }, { "content": "The other man tries to fight back but is overpowered by the man.", "type": "action", "weight": 2 }, { "content": "The man throws the other man to the floor.", "type": "action", "weight": 2 }, { "content": "The room is in a mess, tabletops scattered with items.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera switches to three men sitting in front of a microphone who are laughing.", "visual_elements": [ { "content": "The camera switches to three men sitting in front of a microphone who are laughing.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0258", "video_path": "VELOCITI/v_CwdGYMM2bHM_seg_55_65.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, 
"height": 720 }, "caption": "The video begins with the camera focusing on a man wearing a green jacket and hat, who is standing in the cabin of a boat with a long gun in his hand, with blue sky and the ocean outside the window.\nNext, the camera switches to the exterior of the ship where two men are standing on the foredeck of the ship. The man on the left is standing on the edge of the deck taking pictures as the man on the right begins to climb the exterior of the cabin.\nThe camera switches frames as the man continues to climb, a man in a green jacket has a gun in his hand, he walks towards the man who is climbing, the man in green walks towards the man who is climbing, the man who is climbing quickly climbs away. Finally, the camera switches to three men on a boat at the same time.", "events": [ { "event": "The video begins with the camera focusing on a man wearing a green jacket and hat, who is standing in the cabin of a boat with a long gun in his hand, with blue sky and the ocean outside the window.", "visual_elements": [ { "content": "The video begins with the camera focusing on a man wearing a green jacket and hat,", "type": "camera", "weight": 3 }, { "content": "The man is standing in the cabin of a boat with a long gun in his hand,", "type": "action", "weight": 2 }, { "content": "Outside the window are blue skies and oceans", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera switches to the exterior of the ship where two men are standing on the foredeck of the ship. The man on the left is standing on the edge of the deck taking pictures as the man on the right begins to climb the exterior of the cabin.", "visual_elements": [ { "content": "The camera switches to the exterior of the ship where two men are standing on the foredeck of the ship.", "type": "camera", "weight": 3 }, { "content": "The man on the left is standing on the edge of the deck taking pictures.", "type": "action", "weight": 2 }, { "content": "The man on the right begins to climb the exterior of the cabin.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches frames as the man continues to climb, a man in a green jacket has a gun in his hand, he walks towards the man who is climbing, the man in green walks towards the man who is climbing, the man who is climbing quickly climbs away. Finally, the camera switches to three men on a boat at the same time.", "visual_elements": [ { "content": "The camera switches frames as the man continues to climb.", "type": "camera", "weight": 3 }, { "content": "The man in a green jacket has a gun in his hand, he walks towards the man who is climbing.", "type": "action", "weight": 2 }, { "content": "The man who is climbing quickly climbs away.", "type": "action", "weight": 3 }, { "content": "Finally, the camera switches to three men on a boat at the same time.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 10 }, { "index": "TUNA_0259", "video_path": "VELOCITI/v_D-9zx3m6lLU_seg_25_35.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on two male characters. The man on the left is wearing a sleeveless gray t-shirt, a yellow necklace and a blue wristband. The man on the right wears a blue t-shirt and glasses with his back to the camera. In the background is a brown van with windows reflecting the surrounding trees and sky. 
The man on the left and the man on the right are shown exchanging words, the man on the left makes frequent hand movements, the man on the left suddenly cups the man on the right's face with both hands and the man on the right looks a little surprised. The man on the left turns away and walks towards the van.\nThe camera switches to the man on the left opening the door of the van and sitting in the driver's seat, while the man on the left adjusts the seat and rearview mirror in preparation for starting the vehicle. Finally, the camera switches to the outside of the vehicle and captures the van's license plate number “MAINE 566-241”.", "events": [ { "event": "The video begins with the camera focusing on two male characters. The man on the left is wearing a sleeveless gray t-shirt, a yellow necklace and a blue wristband. The man on the right wears a blue t-shirt and glasses with his back to the camera. In the background is a brown van with windows reflecting the surrounding trees and sky. The man on the left and the man on the right are shown exchanging words, the man on the left makes frequent hand movements, the man on the left suddenly cups the man on the right's face with both hands and the man on the right looks a little surprised. The man on the left turns away and walks towards the van.", "visual_elements": [ { "content": "The video begins with the camera focusing on two male characters.", "type": "camera", "weight": 3 }, { "content": "The man on the left is wearing a sleeveless gray t-shirt, a yellow necklace and a blue wristband.", "type": "attribute", "weight": 2 }, { "content": "The man on the right wears a blue t-shirt and glasses with his back to the camera.", "type": "attribute", "weight": 2 }, { "content": "In the background is a brown van.", "type": "scene", "weight": 2 }, { "content": "Windows reflects the surrounding trees and sky.", "type": "scene", "weight": 2 }, { "content": "The man on the left and the man on the right are shown exchanging words.", "type": "attribute", "weight": 1 }, { "content": "The man on the left makes frequent hand movements.", "type": "scene", "weight": 3 }, { "content": "The man on the left suddenly cups the man on the right's face with both hands.", "type": "action", "weight": 3 }, { "content": "The man on the right looks a little surprised.", "type": "scene", "weight": 2 }, { "content": "The man on the left turns away and walks towards the van.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to the man on the left opening the door of the van and sitting in the driver's seat, while the man on the left adjusts the seat and rearview mirror in preparation for starting the vehicle. 
Finally, the camera switches to the outside of the vehicle and captures the van's license plate number “MAINE 566-241”.", "visual_elements": [ { "content": "The camera switches to the man on the left opening the door of the van and sitting in the driver's seat.", "type": "camera", "weight": 3 }, { "content": "The man on the left adjusts the seat and rearview mirror in preparation for starting the vehicle.", "type": "action", "weight": 2 }, { "content": "The camera switches to the outside of the vehicle and captures the van's license plate number “MAINE 566-241”.", "type": "camera", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0260", "video_path": "VELOCITI/v_D-9zx3m6lLU_seg_75_85.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a young male standing in a doorway. The man is wearing a dark t-shirt and light colored shorts, and is holding a pair of sunglasses. There are several cabins in the background and a few people moving around in the grass.\nThe camera switches to a young female lying on a bed indoors. The female is wearing a pink short-sleeved top and shorts, the female has yellow socks on her feet. The female is lying on the bed with nail polish in her hand and a book spread out in front of her as she paints her nails and speaks towards the male.\nThe camera switches back again to the young male in the doorway, the male stands in the doorway moving his position slightly and talking, the male sunglasses hanging from the collar of his shirt. The camera switches back to the interior again as the young female sits up from the bed and prepares to get up.", "events": [ { "event": "The video begins with the camera focused on a young male standing in a doorway. The man is wearing a dark t-shirt and light colored shorts, and is holding a pair of sunglasses. There are several cabins in the background and a few people moving around in the grass.", "visual_elements": [ { "content": "The video begins with the camera focused on a young male standing in a doorway.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a dark t-shirt and light colored shorts", "type": "attribute", "weight": 2 }, { "content": "The man is holding a pair of sunglasses.", "type": "attribute", "weight": 2 }, { "content": "There are several cabins in the background.", "type": "scene", "weight": 2 }, { "content": "A few people moving around in the grass.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to a young female lying on a bed indoors. The female is wearing a pink short-sleeved top and shorts, the female has yellow socks on her feet. 
The female is lying on the bed with nail polish in her hand and a book spread out in front of her as she paints her nails and speaks towards the male.", "visual_elements": [ { "content": "The camera switches to a young female lying on a bed indoors.", "type": "camera", "weight": 3 }, { "content": "The female is wearing a pink short-sleeved top and shorts", "type": "attribute", "weight": 2 }, { "content": "The female has yellow socks on her feet.", "type": "attribute", "weight": 2 }, { "content": "The female is lying on the bed with nail polish in her hand.", "type": "action", "weight": 2 }, { "content": "A book spread out in front of the female.", "type": "scene", "weight": 2 }, { "content": "The female paints her nails and speaks towards the male.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches back again to the young male in the doorway, the male stands in the doorway moving his position slightly and talking, the male sunglasses hanging from the collar of his shirt. The camera switches back to the interior again as the young female sits up from the bed and prepares to get up.", "visual_elements": [ { "content": "The camera switches back again to the young male in the doorway.", "type": "camera", "weight": 3 }, { "content": "The male stands in the doorway moving his position slightly and talking.", "type": "action", "weight": 2 }, { "content": "The male sunglasses hanging from the collar of his shirt.", "type": "scene", "weight": 3 }, { "content": "The camera switches back to the interior again.", "type": "camera", "weight": 3 }, { "content": "The young female sits up from the bed and prepares to get up.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0261", "video_path": "VELOCITI/v_D83XRiOVARQ_seg_30_40.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a soldier in camouflage uniform wearing a military helmet with glasses, a walkie-talkie on his shoulder, and an American flag emblem on his right arm. In the background is a desolate desert area. Another soldier is lying on the ground with a rifle, pointing it forward on guard.\nThe camera switches to a girl, the girl is standing in front of a car covered in bullet holes with visible flames, the girl is wearing a white long sleeved shirt with a floral print and black pants, the girl has a scared look on her face.\nThe camera switches back to the soldier, the soldier in the stand gazes ahead waving his arms, the soldier remains on his back on a pile of sandbags. The camera switches to the soldier in the station, the soldier's face is stony.", "events": [ { "event": "The video begins with the camera focusing on a soldier in camouflage uniform wearing a military helmet with glasses, a walkie-talkie on his shoulder, and an American flag emblem on his right arm. In the background is a desolate desert area. 
Another soldier is lying on the ground with a rifle, pointing it forward on guard.", "visual_elements": [ { "content": "The video begins with the camera focusing on a soldier in camouflage uniform.", "type": "camera", "weight": 3 }, { "content": "The soldier wears a military helmet with glasses and a walkie-talkie on his shoulder.", "type": "attribute", "weight": 2 }, { "content": "A walkie-talkie on his shoulder.", "type": "attribute", "weight": 1 }, { "content": "In the background is a desolate desert area.", "type": "scene", "weight": 2 }, { "content": "Another soldier is lying on the ground with a rifle, pointing it forward on guard.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to a girl, the girl is standing in front of a car covered in bullet holes with visible flames, the girl is wearing a white long sleeved shirt with a floral print and black pants, the girl has a scared look on her face.", "visual_elements": [ { "content": "The camera switches to a girl.", "type": "camera", "weight": 3 }, { "content": "The girl is standing in front of a car covered in bullet holes with visible flames.", "type": "action", "weight": 2 }, { "content": "The girl is wearing a white long sleeved shirt with a floral print and black pants.", "type": "attribute", "weight": 2 }, { "content": "The girl has a scared look on her face.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches back to the soldier, the soldier in the stand gazes ahead waving his arms, the soldier remains on his back on a pile of sandbags. The camera switches to the soldier in the station, the soldier's face is stony.", "visual_elements": [ { "content": "The camera switches back to the soldier", "type": "camera", "weight": 3 }, { "content": "The soldier in the stand gazes ahead waving his arms.", "type": "action", "weight": 3 }, { "content": "The soldier remains on his back on a pile of sandbags.", "type": "action", "weight": 2 }, { "content": "The camera switches to the soldier in the station.", "type": "camera", "weight": 3 }, { "content": "The soldier's face is stony.", "type": "attribute", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0262", "video_path": "VELOCITI/v_DbORPqtzyx4_seg_120_130.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a living room with a large number of glass shards and wood shavings scattered on the floor, sporadic shards falling from the ceiling, and a blonde woman in a white blouse lying on top of the shards on the floor. The camera switches to an overhead angle with a chandelier shaking.\nThe camera switches frames and another black woman appears, the black woman has curly hair and is wearing a white blouse. The woman is holding her head with her hands and she has a very pained look on her face. 
The camera switches to the blonde female who is lying on the floor and suddenly opens her eyes.\nFinally, the camera switches again to the curly-haired female, her hand propped up against the wall, a look of confused surprise on her face.", "events": [ { "event": "The video begins with the camera focusing on a living room with a large number of glass shards and wood shavings scattered on the floor, sporadic shards falling from the ceiling, and a blonde woman in a white blouse lying on top of the shards on the floor. The camera switches to an overhead angle with a chandelier shaking.", "visual_elements": [ { "content": "The camera focusing on a living room", "type": "camera", "weight": 3 }, { "content": "The floor is scattered with a lot of glass shards and wood shavings.", "type": "scene", "weight": 3 }, { "content": "Debris falls from the ceiling from time to time.", "type": "scene", "weight": 1 }, { "content": "A blonde woman in a white blouse lying on top of the shards on the floor.", "type": "scene", "weight": 3 }, { "content": "The camera switches to an overhead angle with a chandelier shaking.", "type": "camera", "weight": 3 } ] }, { "event": "The camera switches frames and another black woman appears, the black woman has curly hair and is wearing a white blouse. The woman is holding her head with her hands and she has a very pained look on her face. The camera switches to the blonde female who is lying on the floor and suddenly opens her eyes.", "visual_elements": [ { "content": "The camera switches frames and another black woman appears.", "type": "camera", "weight": 3 }, { "content": "The black woman has curly hair and is wearing a white blouse.", "type": "attribute", "weight": 2 }, { "content": "The woman is holding her head with her hands.", "type": "action", "weight": 2 }, { "content": "The woman has a very pained look on her face.", "type": "attribute", "weight": 1 }, { "content": "The camera switches to the blonde female", "type": "camera", "weight": 3 }, { "content": "The woman is lying on the floor and suddenly opens her eyes.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera switches again to the curly-haired female, her hand propped up against the wall, a look of confused surprise on her face.", "visual_elements": [ { "content": "The camera switches again to the curly-haired female.", "type": "camera", "weight": 3 }, { "content": "The woman's hand is propped up against the wall.", "type": "action", "weight": 2 }, { "content": "A look of confused surprise on the woman's face.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0263", "video_path": "VELOCITI/v_E1e4f8YdkLg_seg_45_55.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a bulletin board on a wooden wall that is covered with various papers and photographs. 
The camera switches to an indoor scene where a woman in a white sweater sits at a table as she uses a wooden stick to examine the mouth of a little girl with long hair.\nThe camera switches frames to show a man in dark blue short sleeves and a woman in a denim top slamming open the door and storming inside, the female overturns all the items on her desk onto the floor, the woman in the white sweater stands behind the desk and starts arguing with the two men, all three are very angry, the little girl at the desk watches as the men argue.", "events": [ { "event": "The video begins with the camera focusing on a bulletin board on a wooden wall that is covered with various papers and photographs. The camera switches to an indoor scene where a woman in a white sweater sits at a table as she uses a wooden stick to examine the mouth of a little girl with long hair.", "visual_elements": [ { "content": "The video begins with the camera focusing on a bulletin board on a wooden wall.", "type": "camera", "weight": 3 }, { "content": "A bulletin board covered with various papers and photographs.", "type": "scene", "weight": 2 }, { "content": "The camera switches to an indoor scene.", "type": "camera", "weight": 3 }, { "content": "A woman in a white sweater sits at a table.", "type": "attribute", "weight": 2 }, { "content": "The woman uses a wooden stick to examine the mouth of a little girl with long hair.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches frames to show a man in dark blue short sleeves and a woman in a denim top slamming open the door and storming inside, the female overturns all the items on her desk onto the floor, the woman in the white sweater stands behind the desk and starts arguing with the two men, all three are very angry, the little girl at the desk watches as the men argue.", "visual_elements": [ { "content": "The camera switches frames .", "type": "camera", "weight": 3 }, { "content": "A man in dark blue short sleeves and a woman in a denim top slamming open the door and storming inside.", "type": "action", "weight": 3 }, { "content": "The female overturns all the items on her desk onto the floor", "type": "action", "weight": 3 }, { "content": "The woman in the white sweater stands behind the desk and starts arguing with the two men", "type": "action", "weight": 2 }, { "content": "All three are very angry, the little girl at the desk watches as the men argue.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0264", "video_path": "VELOCITI/v_E1e4f8YdkLg_seg_75_85.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on the wall of a room and a female in a denim jacket enters the frame, she then walks out of the room. The male in a blue t-shirt stands by a table with an angry rant, there is a clear jar filled with candy on the table, the male picks up the jar and slams it against a blanket hanging on the wall, he then elbows the lamp behind him.\nFinally, the camera switches to a wooden hut with some trees and grass outside, and a sign on the door of the hut that says “Talent Show”.", "events": [ { "event": "The video begins with the camera focusing on the wall of a room and a female in a denim jacket enters the frame, she then walks out of the room. 
The male in a blue t-shirt stands by a table with an angry rant, there is a clear jar filled with candy on the table, the male picks up the jar and slams it against a blanket hanging on the wall, he then elbows the lamp behind him.", "visual_elements": [ { "content": "The video begins with the camera focusing on the wall of a room.", "type": "camera", "weight": 3 }, { "content": "A female in a denim jacket enters the frame", "type": "scene", "weight": 2 }, { "content": "A female in a denim jacket then walks out of the room.", "type": "action", "weight": 2 }, { "content": "The male in a blue t-shirt stands by a table with an angry rant.", "type": "action", "weight": 2 }, { "content": "There is a clear jar filled with candy on the table.", "type": "scene", "weight": 2 }, { "content": "The male picks up the jar and slams it against a blanket hanging on the wall.", "type": "action", "weight": 2 }, { "content": "Then the male elbows the lamp behind him.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera switches to a wooden hut with some trees and grass outside, and a sign on the door of the hut that says “Talent Show”.", "visual_elements": [ { "content": "The camera switches to a wooden hut.", "type": "camera", "weight": 3 }, { "content": "The wooden hut has some trees and grass outside", "type": "scene", "weight": 2 }, { "content": "A sign on the door of the hut that says “Talent Show”.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0265", "video_path": "VELOCITI/v_E6gWFTv3xE8_seg_125_135.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a male dressed in black, the male is kneeling on the ground performing CPR, surrounded by burning flames and the wreckage of a crashed airplane. In the background heavy rain is pouring down and water is pooling on the ground.\nThe camera moves to the right as the male slowly raises his head and two rescuers wearing orange reflective undershirts appear in the frame, the two rescuers wearing orange reflective undershirts quickly run towards and pick up the male.\nThe camera gradually closes in on the male standing still and staring ahead, with more rescuers busy in the background.", "events": [ { "event": "The video begins with a male dressed in black, the male is kneeling on the ground performing CPR, surrounded by burning flames and the wreckage of a crashed airplane. 
In the background heavy rain is pouring down and water is pooling on the ground.", "visual_elements": [ { "content": "A male dressed in black appeared on the frame.", "type": "scene", "weight": 3 }, { "content": "The male is kneeling on the ground performing CPR.", "type": "action", "weight": 3 }, { "content": "Surrounded by burning flames and the wreckage of crashed airplanes.", "type": "scene", "weight": 2 }, { "content": "In the background heavy rain is pouring down.", "type": "scene", "weight": 2 }, { "content": "Water is pooling on the ground.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera moves to the right as the male slowly raises his head and two rescuers wearing orange reflective undershirts appear in the frame, the two rescuers wearing orange reflective undershirts quickly run towards and pick up the male.", "visual_elements": [ { "content": "The camera moves to the right .", "type": "camera", "weight": 3 }, { "content": "The male slowly raises his head.", "type": "action", "weight": 2 }, { "content": "Two rescuers wearing orange reflective undershirts appear in the frame.", "type": "scene", "weight": 2 }, { "content": "The two rescuers wearing orange reflective undershirts quickly run towards and pick up the male.", "type": "action", "weight": 2 } ] }, { "event": "The camera gradually closes in on the male standing still and staring ahead, with more rescuers busy in the background.", "visual_elements": [ { "content": "The camera gradually closes in.", "type": "camera", "weight": 3 }, { "content": "The male is standing still and staring ahead.", "type": "scene", "weight": 2 }, { "content": "More rescuers are busy in the background", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0266", "video_path": "VELOCITI/v_ERw4l461lhU_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a room with photos still plastered on the walls, and a male wearing a black leather jacket is pushed onto a bed, his hair gray and white. The camera switches frames to show a female wearing a white tank top and black pants kicking in the door to the room with a gun raised. The camera switches to the male on the edge of the bed, who looks forward with a terrified expression on his face tilting his head.\nThe camera switches to the female swinging the gun and smacking the male, the camera switches again and the male is shown being smacked and covering his head. Finally, the camera switches frames to show a blonde female crying and looking at the male on the ground as the female with the gun bends over and looks down at the ground.", "events": [ { "event": "The video begins with the camera focusing on a room with photos still plastered on the walls, and a male wearing a black leather jacket is pushed onto a bed, his hair gray and white. The camera switches frames to show a female wearing a white tank top and black pants kicking in the door to the room with a gun raised. 
The camera switches to the male on the edge of the bed, who looks forward with a terrified expression on his face tilting his head.", "visual_elements": [ { "content": "The camera focusing on a room with photos still plastered on the walls.", "type": "camera", "weight": 3 }, { "content": "A male wearing a black leather jacket is pushed onto a bed.", "type": "action", "weight": 3 }, { "content": "The male has gray and white hair.", "type": "attribute", "weight": 2 }, { "content": "The camera switches frames", "type": "camera", "weight": 3 }, { "content": "A female wearing a white tank top and black pants kicking in the door to the room with a gun raised.", "type": "scene", "weight": 3 }, { "content": "The camera switches to the male on the edge of the bed", "type": "camera", "weight": 3 }, { "content": "The male looks forward with a terrified expression on his face tilting his head.", "type": "scene", "weight": 3 } ] }, { "event": "The camera switches to the female swinging the gun and smacking the male, the camera switches again and the male is shown being smacked and covering his head. Finally, the camera switches frames to show a blonde female crying and looking at the male on the ground as the female with the gun bends over and looks down at the ground.", "visual_elements": [ { "content": "The camera switches to the female swinging the gun and smacking the male.", "type": "camera", "weight": 3 }, { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "The male is shown being smacked and covering his head.", "type": "scene", "weight": 3 }, { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "A blonde woman appears on the frame.", "type": "scene", "weight": 2 }, { "content": "The blonde female crying and looking at the male on the ground.", "type": "attribute", "weight": 2 }, { "content": "The female with the gun bends over and looks down at the ground.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0267", "video_path": "VELOCITI/v_ERw4l461lhU_seg_45_55.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a room with various photos plastered on the walls. A female wearing a white tank top, she is arguing with a male wearing a gray suit. The female pushes and shoves the male out of the room and removes the camera from around the male's neck, then the female closes the door with force.\nFinally, the camera focuses on the female, who stands in front of the door and picks up the photo on the floor, the female examines the photo in her hand and then looks up and speaks to another person.", "events": [ { "event": "The video begins with the camera focusing on a room with various photos plastered on the walls. A female wearing a white tank top, she is arguing with a male wearing a gray suit. 
The female pushes and shoves the male out of the room and removes the camera from around the male's neck, then the female closes the door with force.", "visual_elements": [ { "content": "The camera focusing on a room.", "type": "camera", "weight": 3 }, { "content": "The room with various photos plastered on the walls.", "type": "scene", "weight": 2 }, { "content": "A female wearing a white tank top.", "type": "scene", "weight": 2 }, { "content": "The female is arguing with a male wearing a gray suit.", "type": "action", "weight": 3 }, { "content": "The female pushes and shoves the male out of the room and removes the camera from around the male's neck.", "type": "action", "weight": 3 }, { "content": "Then the female closes the door with force.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera focuses on the female, who stands in front of the door and picks up the photo on the floor, the female examines the photo in her hand and then looks up and speaks to another person.", "visual_elements": [ { "content": "The camera focuses on the female.", "type": "camera", "weight": 3 }, { "content": "The female stands in front of the door and picks up the photo on the floor.", "type": "action", "weight": 3 }, { "content": "The female examines the photo in her hand and then looks up and speaks to another person.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0268", "video_path": "VELOCITI/v_EkE_bNKYqCM_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a long-haired man wearing a black dress and white shirt who is sitting at a dining table. The man is holding a white bowl of soup. The camera switches to another man with short hair and a beard, the man with short hair and a beard is holding a silver spoon and the man with short hair and a beard is chewing and talking.\nThe camera switches again to show a female in a white dress wearing a sparkling necklace, the female is looking at a man and smiling then bowing her head. The camera switches to another male with glasses and a beard, wiping his glasses with his finger and then turning his milky head. The camera switches to a man with curly hair who is holding a spoon and talking, and finally, the camera focuses on an older man wearing a black dress and white shirt with a white flower pinned to his chest and is tilting his head.", "events": [ { "event": "The video begins with the camera focusing on a long-haired man wearing a black dress and white shirt who is sitting at a dining table. The man is holding a white bowl of soup. 
The camera switches to another man with short hair and a beard, the man with short hair and a beard is holding a silver spoon and the man with short hair and a beard is chewing and talking.", "visual_elements": [ { "content": "The camera focusing on a long-haired man wearing a black dress and white shirt.", "type": "camera", "weight": 3 }, { "content": "The man is sitting at a dining table.", "type": "attribute", "weight": 2 }, { "content": "The man is holding a white bowl of soup.", "type": "action", "weight": 1 }, { "content": "The camera switches to another man with short hair and a beard.", "type": "camera", "weight": 3 }, { "content": "The man with short hair and a beard is holding a silver spoon", "type": "action", "weight": 3 }, { "content": "The man with short hair and a beard is chewing and talking.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches again to show a female in a white dress wearing a sparkling necklace, the female is looking at a man and smiling then bowing her head. The camera switches to another male with glasses and a beard, wiping his glasses with his finger and then turning his milky head. The camera switches to a man with curly hair who is holding a spoon and talking, and finally, the camera focuses on an older man wearing a black dress and white shirt with a white flower pinned to his chest and is tilting his head.", "visual_elements": [ { "content": "The camera switches again to show a female in a white dress wearing a sparkling necklace.", "type": "camera", "weight": 3 }, { "content": "The female is looking at a man and smiling then bowing her head.", "type": "action", "weight": 3 }, { "content": "The camera switches to another male with glasses and a beard.", "type": "camera", "weight": 3 }, { "content": "The male is wiping his glasses with his finger and then turning his milky head.", "type": "action", "weight": 2 }, { "content": "The camera switches to a man with curly hair.", "type": "camera", "weight": 3 }, { "content": "The male is holding a spoon and talking", "type": "action", "weight": 2 }, { "content": "The camera focuses on an older man", "type": "camera", "weight": 3 }, { "content": "The older man is wearing a black dress and white shirt.", "type": "attribute", "weight": 2 }, { "content": "The older man is wearing a white flower pinned to his chest.", "type": "attribute", "weight": 2 }, { "content": "The older man is tilting his head.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0269", "video_path": "VELOCITI/v_Em4igIXJRgw_seg_35_45.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a male standing in a carriage wearing a gray suit looking at the camera as he walks down the corridor of a train, pulling a cigarette out of his pocket as he looks back with the cigarette in his mouth. 
He walks past several carriage doors and at the end of the corridor is a blue door, he opens the door and enters a new carriage before continuing to look ahead with the cigarette in his mouth.\nFinally, the camera cuts to an image of a blue door at the end of a hallway with curtains drawn and a male dressed in green inside the door.", "events": [ { "event": "The video begins with the camera focusing on a male standing in a carriage wearing a gray suit looking at the camera as he walks down the corridor of a train, pulling a cigarette out of his pocket as he looks back with the cigarette in his mouth. He walks past several carriage doors and at the end of the corridor is a blue door, he opens the door and enters a new carriage before continuing to look ahead with the cigarette in his mouth.", "visual_elements": [ { "content": "The camera focusing on a male standing in a carriage wearing a gray suit looking at the camera.", "type": "camera", "weight": 3 }, { "content": "The male walks down the corridor of a train, pulling a cigarette out of his pocket.", "type": "action", "weight": 2 }, { "content": "The male looks back with the cigarette in his mouth.", "type": "action", "weight": 2 }, { "content": "He walks past several carriage doors.", "type": "action", "weight": 2 }, { "content": "At the end of the corridor is a blue door.", "type": "scene", "weight": 2 }, { "content": "The male opens the door and enters a new carriage.", "type": "action", "weight": 2 }, { "content": "The male continues to look ahead with the cigarette in his mouth.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera cuts to an image of a blue door at the end of a hallway with curtains drawn and a male dressed in green inside the door.", "visual_elements": [ { "content": "The camera cuts to an image of a blue door at the end of a hallway.", "type": "camera", "weight": 3 }, { "content": "The hallway with curtains drawn and a male dressed in green inside the door.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0270", "video_path": "VELOCITI/v_FOWZ7B1QenY_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a middle-aged man in a dark suit standing in front of an office window. The man is wearing a red patterned tie and has a pink pocket towel stuck in his breast pocket as a young man in a gray suit enters the frame.\nThe camera switches to the young man who is looking at the middle-aged man, the camera switches to a panoramic view of the young man getting ready to sit on a stool and the middle-aged man waving towards the young man, in the background you can see the cityscape out of the window and the decorations in the office. The camera switches to the young man getting up from his seat, the camera switches again to the middle-aged man who is wearing a bluetooth headset as he continues to talk, the camera goes back to the young man as he stands in front of a bookshelf with books and decorations in the background.\nFinally, the camera switches again to the middle-aged man, who is still standing in front of the window, continuing to talk. The camera returns to the younger man, who is standing in front of a bookshelf, and he is listening intently to the middle-aged man's words. 
The video ends in a scene where the two men are conversing.", "events": [ { "event": "The video begins with the camera focusing on a middle-aged man in a dark suit standing in front of an office window. The man is wearing a red patterned tie and has a pink pocket towel stuck in his breast pocket as a young man in a gray suit enters the frame.", "visual_elements": [ { "content": "The camera focusing on a middle-aged man in a dark suit standing in front of an office window.", "type": "camera", "weight": 3 }, { "content": "The male is wearing a red patterned tie.", "type": "attribute", "weight": 2 }, { "content": "The male has a pink pocket towel stuck in his breast pocket .", "type": "attribute", "weight": 2 }, { "content": "A young man in a gray suit enters the frame.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to the young man who is looking at the middle-aged man, the camera switches to a panoramic view of the young man getting ready to sit on a stool and the middle-aged man waving towards the young man, in the background you can see the cityscape out of the window and the decorations in the office. The camera switches to the young man getting up from his seat, the camera switches again to the middle-aged man who is wearing a bluetooth headset as he continues to talk, the camera goes back to the young man as he stands in front of a bookshelf with books and decorations in the background.", "visual_elements": [ { "content": "The camera switches to the young man who is looking at the middle-aged man.", "type": "camera", "weight": 3 }, { "content": "The camera switches to a panoramic view.", "type": "camera", "weight": 3 }, { "content": "The young man getting ready to sit on a stool.", "type": "action", "weight": 2 }, { "content": "The middle-aged man waving towards the young man.", "type": "action", "weight": 2 }, { "content": "The background is the city view from the window and the decoration in the office.", "type": "scene", "weight": 2 }, { "content": "The camera switches to the young man getting up from his seat.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man who is wearing a bluetooth headset as he continues to talk.", "type": "attribute", "weight": 2 }, { "content": "The camera goes back to the young man as he stands in front of a bookshelf.", "type": "camera", "weight": 3 }, { "content": "Books and decorations on a bookshelf in the background.", "type": "scene", "weight": 3 } ] }, { "event": "Finally, the camera switches again to the middle-aged man, who is still standing in front of the window, continuing to talk. The camera returns to the younger man, who is standing in front of a bookshelf, and he is listening intently to the middle-aged man's words. 
The video ends in a scene where the two men are conversing.", "visual_elements": [ { "content": "The camera switches again to the middle-aged man who is still standing in front of the window, continuing to talk.", "type": "camera", "weight": 3 }, { "content": "The camera returns to the younger man who is standing in front of a bookshelf.", "type": "camera", "weight": 3 }, { "content": "The younger man is listening intently to the middle-aged man's words.", "type": "action", "weight": 2 }, { "content": "The video ends in a scene where the two men are conversing.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0271", "video_path": "VELOCITI/v_FX4J_vERu9I_seg_110_120.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a male character appearing in frame wearing a black sleeveless battle suit draped in a black cape. He holds a huge battle axe and cleaves his enemies to pieces. In the background is an ancient castle, with flames shooting out from all sides as enemies are knocked to the ground, some engulfed in flames.\nThe camera carries on switching as a long-haired male character cleaves a white-haired enemy, whose body instantly turns into black and gray shards bursting into flames. The male in the sleeveless combat suit rises from the flames and slams the long-haired enemy in front of him out of the window of the ancient castle. The camera switches in on a close-up of the face of the male character in the sleeveless combat suit, his expression firm and determined.\nFinally, the camera switches to the sleeveless male character breaking out of the window from the old castle with his weapon as he rams two enemies from the window down.", "events": [ { "event": "The video begins with a male character appearing in frame wearing a black sleeveless battle suit draped in a black cape. He holds a huge battle axe and cleaves his enemies to pieces. In the background is an ancient castle, with flames shooting out from all sides as enemies are knocked to the ground, some engulfed in flames.", "visual_elements": [ { "content": "A male character appears in frame.", "type": "attribute", "weight": 3 }, { "content": "The male character is wearing a black sleeveless battle suit draped in a black cape.", "type": "attribute", "weight": 2 }, { "content": "The male character holds a huge battle axe and cleaves his enemies to pieces.", "type": "action", "weight": 2 }, { "content": "In the background is an ancient castle.", "type": "scene", "weight": 2 }, { "content": "Flames shoot out from all sides.", "type": "scene", "weight": 2 }, { "content": "Enemies are knocked to the ground, some engulfed in flames.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera carries on switching as a long-haired male character cleaves a white-haired enemy, whose body instantly turns into black and gray shards bursting into flames. The male in the sleeveless combat suit rises from the flames and slams the long-haired enemy in front of him out of the window of the ancient castle. 
The camera switches in on a close-up of the face of the male character in the sleeveless combat suit, his expression firm and determined.", "visual_elements": [ { "content": "The camera carries on switching.", "type": "camera", "weight": 3 }, { "content": "A long-haired male character cleaves a white-haired enemy.", "type": "action", "weight": 3 }, { "content": "The enemy's body instantly turns into black and gray shards bursting into flames.", "type": "attribute", "weight": 2 }, { "content": "The male in the sleeveless combat suit rises from the flames.", "type": "action", "weight": 2 }, { "content": "The male in the sleeveless combat suit slams the long-haired enemy in front of him out of the window of the ancient castle.", "type": "action", "weight": 3 }, { "content": "The camera switches in on a close-up of the face of the male character in the sleeveless combat suit.", "type": "camera", "weight": 3 }, { "content": "The male's expression is firm and determined.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the camera switches to the sleeveless male character breaking out of the window from the old castle with his weapon as he rams two enemies from the window down.", "visual_elements": [ { "content": "The camera switches to the sleeveless male character breaking out of the window from the old castle .", "type": "camera", "weight": 3 }, { "content": "The male rams two enemies from the window down.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0272", "video_path": "VELOCITI/v_FX4J_vERu9I_seg_125_135.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera showing the exterior of a gothic building with a huge clock on the building. A winged creature flies over the building spewing flames.\nThe camera switches to a close up of two characters in a fierce fight. One character is wearing dark clothing and the other is wearing armor-like clothing. The camera switches again to a close-up of a monster and a long-haired male character. The camera switches to a hallway where several characters are running quickly, trying to escape the flames.\nThe camera switches to a heavy wooden door, which is quickly closed then the camera switches to a spacious room with a throne in the center of the room, on which sits a character struggling in chains. On either side stand two characters in armor who are armed.", "events": [ { "event": "The video begins with the camera showing the exterior of a gothic building with a huge clock on the building. A winged creature flies over the building spewing flames.", "visual_elements": [ { "content": "The video begins with the camera showing the exterior of a gothic building.", "type": "camera", "weight": 3 }, { "content": "A huge clock on the building.", "type": "scene", "weight": 2 }, { "content": "A winged creature flies over the building spewing flames.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to a close up of two characters in a fierce fight. One character is wearing dark clothing and the other is wearing armor-like clothing. The camera switches again to a close-up of a monster and a long-haired male character. 
The camera switches to a hallway where several characters are running quickly, trying to escape the flames.", "visual_elements": [ { "content": "The camera switches to a close up of two characters in a fierce fight.", "type": "camera", "weight": 3 }, { "content": "One character is wearing dark clothing.", "type": "attribute", "weight": 2 }, { "content": "The other is wearing armor-like clothing.", "type": "attribute", "weight": 2 }, { "content": "The camera switches again to a close-up of a monster and a long-haired male character.", "type": "camera", "weight": 3 }, { "content": "The camera switches to a hallway where several characters are running quickly, trying to escape the flames.", "type": "camera", "weight": 3 } ] }, { "event": "The camera switches to a heavy wooden door, which is quickly closed then the camera switches to a spacious room with a throne in the center of the room, on which sits a character struggling in chains. On either side stand two characters in armor who are armed.", "visual_elements": [ { "content": "The camera switches to a heavy wooden door.", "type": "camera", "weight": 3 }, { "content": "The door is quickly closed.", "type": "attribute", "weight": 3 }, { "content": "The camera switches to a spacious room.", "type": "camera", "weight": 3 }, { "content": "A throne in the center of the room.", "type": "scene", "weight": 2 }, { "content": "On the throne sits a character struggling in chains.", "type": "attribute", "weight": 2 }, { "content": "On either side stand two characters in armor who are armed.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0273", "video_path": "VELOCITI/v_FX4J_vERu9I_seg_140_150.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a male character wearing a dark sleeveless top with his hair tied back in a ponytail as he cuts the shackles off a chained male character.\nThe camera switches to a dragon with huge wings spread out flying low over the castle as a group of people run inside the building. The camera switches to a black robed character jumping down from a high place with huge wings spread out and a group of enemies in the distance. The camera switches to a picture of a character in black robes, holding a long axe that cleaves the enemies and bursts into flames.\nFinally, the camera switches to two characters in combat uniforms standing in front of the chained male character, with the character on the left handing the weapon to the character in the center. Finally, a close-up of the camera shows the face of the bound male character, who has a serious expression with determined eyes.", "events": [ { "event": "The video begins with the camera focusing on a male character wearing a dark sleeveless top with his hair tied back in a ponytail as he cuts the shackles off a chained male character.", "visual_elements": [ { "content": "The camera focuses on a male character wearing a dark sleeveless top.", "type": "camera", "weight": 3 }, { "content": "The male's hair is tied back in a ponytail.", "type": "attribute", "weight": 3 }, { "content": "The male cuts the shackles off a chained male character.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to a dragon with huge wings spread out flying low over the castle as a group of people run inside the building. 
The camera switches to a black robed character jumping down from a high place with huge wings spread out and a group of enemies in the distance. The camera switches to a picture of a character in black robes, holding a long axe that cleaves the enemies and bursts into flames.", "visual_elements": [ { "content": "The camera switches to a dragon with huge wings spread out flying low over the castle.", "type": "camera", "weight": 3 }, { "content": "A group of people run inside the building.", "type": "scene", "weight": 2 }, { "content": "The camera switches to a black robed character jumping down from a high place with huge wings spread out .", "type": "camera", "weight": 3 }, { "content": "A group of enemies in the distance.", "type": "scene", "weight": 2 }, { "content": "The camera switches to a picture of a character in black robes.", "type": "camera", "weight": 3 }, { "content": "The character is holding a long axe that cleaves the enemies and bursts into flames.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera switches to two characters in combat uniforms standing in front of the chained male character, with the character on the left handing the weapon to the character in the center. Finally, a close-up of the camera shows the face of the bound male character, who has a serious expression with determined eyes.", "visual_elements": [ { "content": "The camera switches to two characters in combat uniforms standing in front of the chained male character.", "type": "camera", "weight": 3 }, { "content": "The character on the left handing the weapon to the character in the center.", "type": "action", "weight": 2 }, { "content": "A close-up of the camera shows the face of the bound male character.", "type": "camera", "weight": 3 }, { "content": "The bound male character has a serious expression with determined eyes.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0274", "video_path": "VELOCITI/v_FZ65jfSwpAk_seg_90_100.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera showing a steep city street with buildings and towers in the distance in the background. Several cars are parked on either side of the street, and a blonde woman in a light-colored jacket is getting out of the driver's seat of one of the cars.\nThe camera switches to the entrance of a building and a male wearing a dark suit and hat walks out of the building. The male holds a piece of paper in his hand and walks towards a car parked on the side of the street.\nThe camera switches to male walking to car where blonde female is, bending down to talk to her through the window. The camera switches to the female sitting in the car talking to the male.", "events": [ { "event": "The video begins with the camera showing a steep city street with buildings and towers in the distance in the background. 
Several cars are parked on either side of the street, and a blonde woman in a light-colored jacket is getting out of the driver's seat of one of the cars.", "visual_elements": [ { "content": "The camera showing a steep city street.", "type": "camera", "weight": 3 }, { "content": "Buildings and towers in the distance in the background.", "type": "scene", "weight": 2 }, { "content": "Several cars are parked on either side of the street,.", "type": "scene", "weight": 2 }, { "content": "A blonde woman in a light-colored jacket is getting out of the driver's seat of one of the cars.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the entrance of a building and a male wearing a dark suit and hat walks out of the building. The male holds a piece of paper in his hand and walks towards a car parked on the side of the street.", "visual_elements": [ { "content": "The camera switches to the entrance of a building.", "type": "camera", "weight": 3 }, { "content": "A male wearing a dark suit and hat walks out of the building", "type": "action", "weight": 2 }, { "content": "The male holds a piece of paper in his hand and walks towards a car parked on the side of the street.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to male walking to car where blonde female is, bending down to talk to her through the window. The camera switches to the female sitting in the car talking to the male.", "visual_elements": [ { "content": "The camera switches to male walking to car where blonde female is, bending down to talk to her through the window.", "type": "camera", "weight": 3 }, { "content": "The camera switches to the female sitting in the car talking to the male.", "type": "camera", "weight": 2 } ] } ], "n_events": 3, "n_elements": 9 }, { "index": "TUNA_0275", "video_path": "VELOCITI/v_G6PcFmNCQpA_seg_90_100.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a bar counter. On the left of the screen is a white woman in a purple dress and a white shawl with a glass of wine in her hand. On the opposite of the woman sits a white man in a dark shirt. In the background a TV screen can be seen, besides which are some decorations and equipments of the bar.\nThe woman supports her elbows on the bar counter with a wine glass in her right hand, and her body leans forward slightly. The man with his hands crossed on the bar counter looks at the woman and talks to her. Then the woman turns her head to the right and turns around. She raises the wine glass to the man and takes a sip. The man turns his head to the left after the woman.\nThen the camera switches to a dusky scene. A woman with long hair in white bikini and shiny skirt stands in the center of the screen, and another man stands behind her. The man pulls the skirt of the long-haired woman with his hand and talks to her. The woman twists her body slightly with a resistant expression.", "events": [ { "event": "The video begins as the camera zooms in on a bar counter. On the left of the screen is a white woman in a purple dress and a white shawl with a glass of wine in her hand. On the opposite of the woman sits a white man in a dark shirt. 
In the background a TV screen can be seen, besides which are some decorations and equipments of the bar.", "visual_elements": [ { "content": "The camera zooms in on a bar counter.", "type": "camera", "weight": 3 }, { "content": "On the left of the screen is a white woman in a purple dress and a white shawl.", "type": "attribute", "weight": 3 }, { "content": "The woman has a glass of wine in her right hand.", "type": "attribute", "weight": 2 }, { "content": "A white man in a dark shirt sits opposite to the woman.", "type": "attribute", "weight": 3 }, { "content": "A TV screen can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "Besides are some decorations and equipments of the bar.", "type": "attribute", "weight": 1 } ] }, { "event": "The woman supports her elbows on the bar counter with a wine glass in her right hand, and her body leans forward slightly. The man with his hands crossed on the bar counter looks at the woman and talks to her. Then the woman turns her head to the right and turns around. She raises the wine glass to the man and takes a sip. The man turns his head to the left after the woman.", "visual_elements": [ { "content": "The woman supports her elbows on the bar counter.", "type": "attribute", "weight": 2 }, { "content": "The woman leans forward slightly.", "type": "attribute", "weight": 2 }, { "content": "The woman holds the wine glass in her right hand.", "type": "attribute", "weight": 3 }, { "content": "The man crosses his hands on the bar counter.", "type": "action", "weight": 1 }, { "content": "The man looks at the woman and talks to her.", "type": "action", "weight": 3 }, { "content": "The woman turns her head to the right and turns her body around.", "type": "action", "weight": 3 }, { "content": "The woman raises the wine glass to the man.", "type": "action", "weight": 2 }, { "content": "The woman takes a sip.", "type": "action", "weight": 2 }, { "content": "The man turns his head to the left after the woman.", "type": "action", "weight": 3 } ] }, { "event": "Then the camera switches to a dusky scene. A woman with long hair in white bikini and shiny skirt stands in the center of the screen, and another man stands behind her. The man pulls the skirt of the long-haired woman with his hand and talks to her. The woman twists her body slightly with a resistant expression.", "visual_elements": [ { "content": "The camera switches to a dusky screen.", "type": "camera", "weight": 3 }, { "content": "A woman with long hair in a white bikini and a shiny skirt stands in the center of the screen.", "type": "attribute", "weight": 3 }, { "content": "There is a man stands behind the long-haired woman.", "type": "attribute", "weight": 2 }, { "content": "The man pulls the skirt of the long-haired woman and talks to her.", "type": "action", "weight": 3 }, { "content": "The woman twists her body slightly with a resistant expression.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0276", "video_path": "VELOCITI/v_G6i3bDGOLB8_seg_85_95.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera zooms in on a man who wears a black jacket and a black and red plaid shirt. The camera only shoots the body's part that above his waist and below his head. He stands besides a vehicle with an outdoor scene at night in the background. 
Far away are a few street lamps. After that, the camera zooms in quickly.\nThen the camera switches to a parking lot, with a restaurant in the background. The neon sign of the restaurant says \"RESTAURANT\". There are few other vehicles parking on the parking lot. Then, the camera captures two men fighting with each other besides a vehicle, then they fall onto the ground. The man above wears a red hat, a black jacket and light-coloured trousers, and another man below wears a light-coloured cloth. The screen is shot by the camera from a low angle. After that, a man in a blue hat, a light-coloured coat and jeans throws himself upon them from the rear. The camera moves to the right with his movement. The man in the blue hat knocks down the man in the red hat. There is another man who wears a red hat and a black jacket runs towards them, and trys to stop them from fighting in a flurry.\nFinally, the camera switches perspective. A man in a black jacket and a black-and-red plaid shirt with the corners of his mouth bleeding, is pulled up by other two men,then pulled afterwards and pressed to the vehicle. The camera zooms in, a man in a red hat appears on the screen, and throws two punches to the man who is pressed to the vehicle in his stomach.", "events": [ { "event": "At the beginning of the video, the camera zooms in on a man who wears a black jacket and a black and red plaid shirt. The camera only shoots the body's part that above his waist and below his head. He stands besides a vehicle with an outdoor scene at night in the background. Far away are a few street lamps. After that, the camera zooms in quickly.", "visual_elements": [ { "content": "The camera zooms in on a man who wears a black jacket and a black-and-red plaid shirt.", "type": "camera", "weight": 3 }, { "content": "The camera only shoots the body's part that above his waist and below his head.", "type": "camera", "weight": 3 }, { "content": "The man stands besides a vehicle.", "type": "attribute", "weight": 2 }, { "content": "An outdoor scene at night is in the background.", "type": "scene", "weight": 2 }, { "content": "There are a few street lamps far away.", "type": "attribute", "weight": 1 }, { "content": "The camera zooms in quickly.", "type": "camera", "weight": 3 } ] }, { "event": "Then the camera switches to a parking lot, with a restaurant in the background. The neon sign of the restaurant says \"RESTAURANT\". There are few other vehicles parking on the parking lot. Then, the camera captures two men fighting with each other besides a vehicle, then they fall onto the ground. The man above wears a red hat, a black jacket and light-coloured trousers, and another man below wears a light-coloured cloth. The screen is shot by the camera from a low angle. After that, a man in a blue hat, a light-coloured coat and jeans throws himself upon them from the rear. The camera moves to the right with his movement. The man in the blue hat knocks down the man in the red hat. 
Another man who wears a red hat and a black jacket runs towards them and tries to stop them from fighting in a flurry.", "visual_elements": [ { "content": "The camera switches to a parking lot.", "type": "camera", "weight": 3 }, { "content": "There is a restaurant in the background.", "type": "scene", "weight": 2 }, { "content": "The neon sign of the restaurant says \"RESTAURANT\".", "type": "attribute", "weight": 2 }, { "content": "There are a few other vehicles parked in the parking lot.", "type": "attribute", "weight": 1 }, { "content": "The camera captures two men fighting with each other beside a vehicle.", "type": "camera", "weight": 3 }, { "content": "The two men fall onto the ground.", "type": "action", "weight": 3 }, { "content": "The man above wears a red hat, a black jacket and light-coloured trousers.", "type": "attribute", "weight": 2 }, { "content": "Another man below wears light-coloured clothes.", "type": "attribute", "weight": 2 }, { "content": "The screen is shot by the camera from a low angle.", "type": "camera", "weight": 3 }, { "content": "A man in a blue hat, a light-coloured coat and jeans throws himself upon them from the rear.", "type": "action", "weight": 3 }, { "content": "The camera moves to the right with his movement.", "type": "camera", "weight": 3 }, { "content": "The man in the blue hat knocks down the man in the red hat.", "type": "action", "weight": 3 }, { "content": "Another man who wears a red hat and a black jacket runs towards them.", "type": "action", "weight": 2 }, { "content": "The man tries to stop them from fighting in a flurry.", "type": "action", "weight": 1 } ] }, { "event": "Finally, the camera switches perspective. A man in a black jacket and a black-and-red plaid shirt, with the corners of his mouth bleeding, is pulled up by two other men, then pulled backwards and pressed against the vehicle. The camera zooms in; a man in a red hat appears on the screen and throws two punches into the stomach of the man who is pressed against the vehicle.", "visual_elements": [ { "content": "The camera switches perspective.", "type": "camera", "weight": 3 }, { "content": "A man in a black jacket and a black-and-red plaid shirt is bleeding from the corners of his mouth.", "type": "attribute", "weight": 3 }, { "content": "Then the man is pulled up by two other men, pulled backwards and pressed against the vehicle.", "type": "action", "weight": 3 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "A man in a red hat appears on the screen.", "type": "attribute", "weight": 2 }, { "content": "The man throws two punches into the stomach of the man who is pressed against the vehicle.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 26 }, { "index": "TUNA_0277", "video_path": "VELOCITI/v_GKh4VG9YQ1Q_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, two persons appear on the screen. A woman with short hair in a dark long coat stands on the right of the screen. She has a lit cigarette in her right hand, and crosses her arms around her chest after she draws on the cigarette and puffs out a cloud of smoke. Another young man in a dark coat and trousers sits on a bench on the left of the screen. There is an identical bench on the left of the man. The man stands up and walks towards the standing woman. 
The camera zooms in gradually with the movement of the man. A corridor is in the background. The balustrades of the corridor and trees outside are covered with twinkling small white lamps.", "events": [ { "event": "At the beginning of the video, two persons appear on the screen. A woman with short hair in a dark long coat stands on the right of the screen. She has a lit cigarette in her right hand, and crosses her arms around her chest after she draws on the cigarette and puffs out a cloud of smoke. Another young man in a dark coat and trousers sits on a bench on the left of the screen. There is a same bench on the left of the man. The man stands up and walks towards the standing woman. The camera zooms in gradually with the movement of the man. A corridor is in the background. The balustrades of the corridor and trees outside are covered with twinkling small white lamps.", "visual_elements": [ { "content": "Two persons appear on the screen.", "type": "attribute", "weight": 2 }, { "content": "A woman with short hair in a dark long coat stands on the right of the screen.", "type": "attribute", "weight": 3 }, { "content": "The woman has a lit cigarette in her right hand.", "type": "attribute", "weight": 3 }, { "content": "The woman crosses her arms around her chest after she draws on the cigarette and puffs out a cloud of smoke.", "type": "action", "weight": 3 }, { "content": "Another young man in a dark coat and trousers sits on a bench on the left of the screen.", "type": "attribute", "weight": 3 }, { "content": "There is a same bench on the left of the man.", "type": "attribute", "weight": 2 }, { "content": "The man stands up and walks towards the standing woman.", "type": "action", "weight": 3 }, { "content": "The camera zooms in gradually with the movement of the man.", "type": "camera", "weight": 3 }, { "content": "A corridor is in the background.", "type": "scene", "weight": 1 }, { "content": "The balustrades of the corridor and trees outside are covered with twinkling small white lights.", "type": "attribute", "weight": 2 } ] } ], "n_events": 1, "n_elements": 10 }, { "index": "TUNA_0278", "video_path": "VELOCITI/v_GrIJLWISdlQ_seg_80_90.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on the backview of a young woman in a white long skirt. She is hanged in the air with her hands tied up on a wooden frame. A giant monster with red eyes can be seen in the background. The woman faces the monster.\nThen, the camera switches to the front of the woman. The close shot shows the upper part of her body and her face. She has curved dark hair and a necklace on her neck.\nThe camera switches another time, and shows the front feature of the monster. The monster is gigantic in size, with its mouth open and its sharp teeth exposed. The buildings of the city from far away can be seen in the background.\nThen, the camera switches to the close shot of a man in armour. He is getting up from the ground. The camera switches to the middle shot, with flames and smoke in the background, and a crowd of people in white cloth are running away.\nThe camera switches again, and shows a crowd of people in white clothes. People in the square are dense and confused. 
Some of them are fleeing in a rush, and some are preparing to fight against the monster with arms in their hands.\nThen, the camera switches to a few persons in similar clothes . They push down the wooden frame on which the woman is hanged.\nFinally, the camera zooms in on the monster again. The monster is roaring, stirring up bursts of waves.", "events": [ { "event": "The video begins as the camera zooms in on the backview of a young woman in a white long skirt. She is hanged in the air with her hands tied up on a wooden frame. A giant monster with red eyes can be seen in the background. The woman faces the monster.", "visual_elements": [ { "content": "The video begins as the camera zooms in on the backview of a young woman in a white long skirt.", "type": "camera", "weight": 3 }, { "content": "The woman's hands are tied up on a wooden frame.", "type": "attribute", "weight": 2 }, { "content": "The woman is hanged in the air.", "type": "attribute", "weight": 2 }, { "content": "A giant monster can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "The monster's eyes are red.", "type": "attribute", "weight": 2 }, { "content": "The woman faces the monster.", "type": "attribute", "weight": 1 } ] }, { "event": "Then, the camera switches to the front of the woman. The close shot shows the upper part of her body and her face. She has curved dark hair and a necklace on her neck.", "visual_elements": [ { "content": "Then, the camera switches to the front of the woman.", "type": "camera", "weight": 3 }, { "content": "The close shot shows the upper part of her body and her face.", "type": "camera", "weight": 3 }, { "content": "She has curved dark hair.", "type": "attribute", "weight": 2 }, { "content": "With a necklace on her neck.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches another time, and shows the front feature of the monster. The monster is gigantic in size, with its mouth open and its sharp teeth exposed. The buildings of the city from far away can be seen in the background.", "visual_elements": [ { "content": "The camera switches for another time.", "type": "camera", "weight": 3 }, { "content": "It shows the front feature of the monster.", "type": "camera", "weight": 3 }, { "content": "The monster is gigantic in size.", "type": "attribute", "weight": 3 }, { "content": "Its mouth open.", "type": "action", "weight": 3 }, { "content": "And its sharp teeth exposed.", "type": "attribute", "weight": 3 }, { "content": "The buildings of the city from far away can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to the close shot of a man in armour. He is getting up from the ground. The camera switches to the middle shot, with flames and smoke in the background, and a crowd of people in white cloth are running away.", "visual_elements": [ { "content": "The camera switches to the close shot of a man in armour.", "type": "camera", "weight": 3 }, { "content": "He is getting up from the ground.", "type": "action", "weight": 2 }, { "content": "The camera switches to the middle shot.", "type": "camera", "weight": 3 }, { "content": "With flames and smoke in the background.", "type": "scene", "weight": 2 }, { "content": "And a crowd of people in white cloth are running away.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches again, and shows a crowd of people in white clothes. People in the square are dense and confused. 
Some of them are fleeing in a rush, and some are preparing to fight against the monster with arms in their hands.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "Shows a crowd of people in white clothes.", "type": "attribute", "weight": 2 }, { "content": "People in the square are dense and confused.", "type": "attribute", "weight": 2 }, { "content": "Some of them are fleeing in a rush.", "type": "action", "weight": 2 }, { "content": "Some are preparing to fight against the monster with arms in their hands.", "type": "action", "weight": 2 } ] }, { "event": "Then, the camera switches to a few persons in similar clothes . They push down the wooden frame on which the woman is hanged.", "visual_elements": [ { "content": "The camera switches perspective.", "type": "camera", "weight": 3 }, { "content": "A few persons in similar clothes are pushing down the wooden frame on which the woman is hanged.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera zooms in on the monster again. The monster is roaring, stirring up bursts of waves.", "visual_elements": [ { "content": "The camera zooms in on the monster again.", "type": "camera", "weight": 3 }, { "content": "The monster is roaring.", "type": "action", "weight": 3 }, { "content": "Its roarings stir up bursts of waves.", "type": "action", "weight": 1 } ] } ], "n_events": 7, "n_elements": 31 }, { "index": "TUNA_0279", "video_path": "VELOCITI/v_GtYAzKwm-R0_seg_55_65.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a young man sitting on the sofa. He wears a black jacket and a grey shirt. He is talking. The background is a bright living room. Green lawn and a blue trampoline can be seen from the window. There are several pillows and a Children's book on the sofa.\nThe camera switches to a woman. She wears a purple shirt and grey trousers. She squats in a corner of the living room, looking for something. Then, she turns left and begins to talk. An old-fashioned black television is on the left of the screen. Beside the woman is a table covered with a piece of printed cloth. There are some sundries and pieces of paper on the table. On the left handside behind her is a blue sofa on which a pillow and a game console are placed.\nThe camera switches back to the young man sitting on the sofa. He glances sideways as he talks.\nThe camera switches again to the woman. She stands up. The camera moves upwards with her movement. She begins to move forward in the room with her hands swinging beside her body. Several decorative paintings on the walls and a bookshelf can be seen in the background. The bookshelve is loaded with books and some decorations.\nThe camera returns back to the young man. He still sits on the sofa. The camera shows the medium shot. The decorations in the living room can be seen. There are many children's toys and sundries in the room. A man in a black vest, a beige shirt and black trousers stands on the right of the video. The camera only shows the view below the man's head. He has his hands in the pockets on the screen.\nThe camera switches back to the woman. She stands in the room and her hands spread beside her body.", "events": [ { "event": "The video begins as the camera zooms in on a young man sitting on the sofa. He wears a black jacket and a grey shirt. 
He is talking. The background is a bright living room. Green lawn and a blue trampoline can be seen from the window. There are several pillows and a Children's book on the sofa.", "visual_elements": [ { "content": "The video begins as the camera zooms in on a young man sitting on the sofa.", "type": "camera", "weight": 3 }, { "content": "The man wears a black jacket and a grey shirt.", "type": "attribute", "weight": 3 }, { "content": "The man is talking.", "type": "action", "weight": 3 }, { "content": "The background is a bright living room.", "type": "scene", "weight": 2 }, { "content": "Green lawn and a blue trampoline can be seen from the window.", "type": "attribute", "weight": 2 }, { "content": "There are several pillows and a Children's book on the sofa.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera switches to a woman. She wears a purple shirt and grey trousers. She squats in a corner of the living room, looking for something. Then, she turns left and begins to talk. An old-fashioned black television is on the left of the screen. Beside the woman is a table covered with a piece of printed cloth. There are some sundries and pieces of paper on the table. On the left handside behind her is a blue sofa on which a pillow and a game console are placed.", "visual_elements": [ { "content": "The camera switches to a woman.", "type": "camera", "weight": 3 }, { "content": "She wears a purple shirt and grey trousers.", "type": "attribute", "weight": 3 }, { "content": "She squats in a corner of the living room, looking for something.", "type": "action", "weight": 3 }, { "content": "The woman turns left and begins to talk.", "type": "action", "weight": 3 }, { "content": "An old-fashioned black television is on the left of the screen.", "type": "attribute", "weight": 2 }, { "content": "Beside the woman is a table covered with a piece of printed cloth.", "type": "attribute", "weight": 2 }, { "content": "There are some sundries and pieces of paper on the table.", "type": "attribute", "weight": 2 }, { "content": "On the left handside behind her is a blue sofa on which a pillow and a game console are placed.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches back to the young man sitting on the sofa. He glances sideways as he talks.", "visual_elements": [ { "content": "The camera switches back to the young man sitting on the sofa.", "type": "camera", "weight": 3 }, { "content": "The man glances sideways as he talks.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches again to the woman. She stands up. The camera moves upwards with her movement. She begins to move forward in the room with her hands swinging beside her body. Several decorative paintings on the walls and a bookshelf can be seen in the background. 
The bookshelve is loaded with books and some decorations.", "visual_elements": [ { "content": "The camera switches again to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman stands up.", "type": "action", "weight": 2 }, { "content": "The camera moves upwards with her movement.", "type": "camera", "weight": 3 }, { "content": "She begins to move forward in the room.", "type": "action", "weight": 3 }, { "content": "Her hands swing beside her body.", "type": "action", "weight": 3 }, { "content": "Several decorative paintings on the walls and a bookshelf can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "The bookshelf is loaded with books and some decorations.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera returns back to the young man. He still sits on the sofa. The camera shows the medium shot. The decorations in the living room can be seen. There are many children's toys and sundries in the room. A man in a black vest, a beige shirt and black trousers stands on the right of the video. The camera only shows the view below the man's head. He has his hands in the pockets on the screen.", "visual_elements": [ { "content": "The camera returns back to the young man.", "type": "camera", "weight": 3 }, { "content": "He still sits on the sofa.", "type": "attribute", "weight": 1 }, { "content": "The camera shows the medium shot.", "type": "camera", "weight": 3 }, { "content": "The decorations in the living room can be seen.", "type": "attribute", "weight": 1 }, { "content": "There are many children's toys and sundries in the room.", "type": "attribute", "weight": 2 }, { "content": "A man in a black vest, a beige shirt and black trousers stands on the right of the video.", "type": "attribute", "weight": 3 }, { "content": "The camera only shows the view below the man's head.", "type": "camera", "weight": 3 }, { "content": "The man standing on the screen has his hands in his pockets.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches back to the woman. She stands in the room and her hands spread beside her body.", "visual_elements": [ { "content": "The camera switches back to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman stands in the room.", "type": "action", "weight": 2 }, { "content": "The woman's hands spread besides her body.", "type": "action", "weight": 1 } ] } ], "n_events": 6, "n_elements": 34 }, { "index": "TUNA_0280", "video_path": "VELOCITI/v_H46x8fD7WzE_seg_15_25.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a young woman with short hair in a brown sweater. She stands beside a black car with a brown wallet in her left hand. The young woman moves forward while talking happily, during which her left hand shakes up and down while holding the wallet. The camera moves upwards slightly with the movement of the woman. A truck, a passenger passing by and a blur signboard can be seen in the background.\nThen, the camera shifts to a man in a dark jacket and a grey shirt. He sits in a white convertible with glasses in his left hand. At the same time, his arm leans on the door, and his right hand holds the steering wheel. Then the man stands up and gets off the car. The camera moves upwards with his movement. The man stands beside the car, looking at the short-haired woman after shutting the door. 
In the background are green lawns and some trees.", "events": [ { "event": "The video begins as the camera zooms in on a young woman with short hair in a brown sweater. She stands beside a black car with a brown wallet in her left hand. The young woman moves forward while talking happily, during which her left hand shakes up and down while holding the wallet. The camera moves upwards slightly with the movement of the woman. A truck, a passenger passing by and a blur signboard can be seen in the background.", "visual_elements": [ { "content": "The video begins as the camera zooms in on a young woman with short hair in a brown sweater.", "type": "camera", "weight": 3 }, { "content": "The short-haired woman stands beside a black car.", "type": "attribute", "weight": 3 }, { "content": "The short-haired woman has a brown wallet in her left hand.", "type": "attribute", "weight": 2 }, { "content": "The young woman moves forward while talking happily,", "type": "action", "weight": 3 }, { "content": "In this period the woman's left hand shakes up and down while holding the wallet.", "type": "action", "weight": 2 }, { "content": "The camera moves upwards slightly with the movement of the woman.", "type": "camera", "weight": 3 }, { "content": "A truck, A passenger passing by and a blur signboard can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera shifts to a man in a dark jacket and a grey shirt. He sits in a white convertible with glasses in his left hand. At the same time, his arm leans on the door, and his right hand holds the steering wheel. Then the man stands up and gets off the car. The camera moves upwards with his movement. The man stands beside the car, looking at the short-haired woman after shutting the door. In the background are green lawns and some trees.", "visual_elements": [ { "content": "Then, the camera shifts to a man in a dark jacket and a grey shirt.", "type": "camera", "weight": 3 }, { "content": "The man sits in a white convertible.", "type": "attribute", "weight": 2 }, { "content": "The man holds glasses in his left hand. At the same time, his arm leans on the door.", "type": "attribute", "weight": 2 }, { "content": "The man's right hand holds the steering wheel.", "type": "attribute", "weight": 2 }, { "content": "The man stands up and gets off the car.", "type": "action", "weight": 3 }, { "content": "The camera moves upwards with his movement.", "type": "camera", "weight": 3 }, { "content": "The man stands beside the car, looking at the short-haired woman after shutting the door.", "type": "action", "weight": 2 }, { "content": "In the background are green lawns and some trees.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0281", "video_path": "VELOCITI/v_H46x8fD7WzE_seg_35_45.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a young woman with short hair in a brown sweater. She stands beside a black car with a brown wallet in her left hand. Her right hand holds the door. On the screen she is talking to someone opposites her nearby. A signboard says \"Car Wash\"can be seen in the background.\nThe camera switches to a man stands outside. He wears a dark jacket and a grey shirt. A stretch of green woods and grasslands are in the background. The man talks facing the camera. 
In the beginning, the man stands steady, then he raises his right hand and spreads his fingers. After that, he tilts his head to the right, and his body swings to the left slightly. At last, the camera switches to the short-haired woman beside the car. She keeps the original posture with a smile on her face.", "events": [ { "event": "The video begins as the camera zooms in on a young woman with short hair in a brown sweater. She stands beside a black car with a brown wallet in her left hand. Her right hand holds the door. On the screen she is talking to someone opposites her nearby. A signboard says \"Car Wash\"can be seen in the background.", "visual_elements": [ { "content": "The video begins as the camera zooms in on a young woman with short hair in a brown sweater.", "type": "camera", "weight": 3 }, { "content": "The short-haired woman stands beside a black car.", "type": "attribute", "weight": 2 }, { "content": "The short-haired woman has a brown wallet in her left hand.", "type": "attribute", "weight": 2 }, { "content": "The short-haired woman holds the door with her right hand.", "type": "attribute", "weight": 2 }, { "content": "On the screen she is talking to someone opposites her nearby.", "type": "action", "weight": 3 }, { "content": "A signboard says \"Car Wash\"can be seen in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to a man stands outside. He wears a dark jacket and a grey shirt. A stretch of green woods and grasslands are in the background. The man talks facing the camera. In the beginning, the man stands steady, then he raises his right hand and spreads his fingers. After that, he tilts his head to the right, and his body swings to the left slightly. At last, the camera switches to the short-haired woman beside the car. She keeps the original posture with a smile on her face.", "visual_elements": [ { "content": "The camera switches to a man stands outside.", "type": "camera", "weight": 3 }, { "content": "The man wears a dark jacket and a grey shirt.", "type": "attribute", "weight": 2 }, { "content": "A stretch of green woods and grasslands are in the background.", "type": "scene", "weight": 1 }, { "content": "The man talks facing the camera.", "type": "action", "weight": 2 }, { "content": "In the beginning, the man stands steady.", "type": "action", "weight": 2 }, { "content": "Then he raises his right hand and spreads his fingers.", "type": "action", "weight": 3 }, { "content": "The man tilts his head to the right.", "type": "action", "weight": 2 }, { "content": "The man's body swings to the left slightly.", "type": "action", "weight": 2 }, { "content": "The camera switches to the short-haired woman beside the car.", "type": "camera", "weight": 3 }, { "content": "The woman keeps the original posture with a smile on her face.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0282", "video_path": "VELOCITI/v_HDxqSt_Ctbw_seg_45_55.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins on a screen at night. In the background is a vast grassland. A recreational vehicle and a car are on the grassland. Two lamps illuminate surroundings. Three men walks to the car on the screen. One of them in dark clothes has a red bag in his right hand. 
He runs towards the car on the left.\nThe camera switches to another perspective, and shows another side of the recreational vehicle. Two men in dark clothes stand beside the trunk of a silver car. The trunk is open. A woman runs toward the car from the direction of the recreational vehicle, with a red bag in her hand.\nThe camera switches again. The camera shoots inside the car. A bearded man sits on the backseat of the car. His head leans back on the seat. Blurred figures can be seen moving outside the car windows. The woman in dark clothes and with a red bag in her hand opens the rear door of the car. She sits opposite the bearded man and pats the man's face lightly with her hand. Then she holds the man's head with both hands.\nThe camera switches and zooms in gradually, and shows the face of the man, who is in a coma. His eyes are closed, and a trace of blood can be seen at the corner of his mouth.", "events": [ { "event": "The video begins on a screen at night. In the background is a vast grassland. A recreational vehicle and a car are on the grassland. Two lamps illuminate the surroundings. Three men walk to the car on the screen. One of them in dark clothes has a red bag in his right hand. He runs towards the car on the left.", "visual_elements": [ { "content": "The video begins on a screen at night.", "type": "scene", "weight": 1 }, { "content": "In the background is a vast grassland.", "type": "scene", "weight": 1 }, { "content": "A recreational vehicle and a car are on the grassland.", "type": "attribute", "weight": 2 }, { "content": "Two lamps illuminate the surroundings.", "type": "attribute", "weight": 2 }, { "content": "Three men walk to the car on the screen.", "type": "action", "weight": 3 }, { "content": "One of them in dark clothes has a red bag in his right hand.", "type": "attribute", "weight": 2 }, { "content": "The man runs towards the car on the left.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to another perspective, and shows another side of the recreational vehicle. Two men in dark clothes stand beside the trunk of a silver car. The trunk is open. A woman runs toward the car from the direction of the recreational vehicle, with a red bag in her hand.", "visual_elements": [ { "content": "The camera switches to another perspective.", "type": "camera", "weight": 3 }, { "content": "Shows another side of the recreational vehicle.", "type": "attribute", "weight": 1 }, { "content": "Two men in dark clothes stand beside the trunk of a silver car.", "type": "attribute", "weight": 2 }, { "content": "The trunk is open.", "type": "attribute", "weight": 2 }, { "content": "A woman runs toward the car from the direction of the recreational vehicle, with a red bag in her hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches again. The camera shoots inside the car. A bearded man sits on the backseat of the car. His head leans back on the seat. Blurred figures can be seen moving outside the car windows. The woman in dark clothes and with a red bag in her hand opens the rear door of the car. She sits opposite the bearded man and pats the man's face lightly with her hand. 
Then she holds the man's head with both hands.", "visual_elements": [ { "content": "The camera switches again and shoots inside the car.", "type": "camera", "weight": 3 }, { "content": "A bearded man sits on the backseat of the car.", "type": "attribute", "weight": 3 }, { "content": "His head leans on the seat.", "type": "attribute", "weight": 2 }, { "content": "Blurred figures can be seen moving outside the car windows.", "type": "attribute", "weight": 1 }, { "content": "The woman in dark clothes and with a red bag in her hand opens the rear door of the car.", "type": "action", "weight": 2 }, { "content": "She sits opposite the bearded man and pats the man's face lightly with her hand.", "type": "action", "weight": 3 }, { "content": "Then she holds the man's head with both hands.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches and zooms in gradually, and shows the face of the man, who is in a coma. His eyes are closed, and a trace of blood can be seen at the corner of his mouth.", "visual_elements": [ { "content": "The camera switches and zooms in gradually.", "type": "camera", "weight": 3 }, { "content": "Shows the face of the man, who is in a coma.", "type": "camera", "weight": 3 }, { "content": "His eyes are closed.", "type": "attribute", "weight": 2 }, { "content": "A trace of blood can be seen at the corner of the man's mouth.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 23 }, { "index": "TUNA_0283", "video_path": "VELOCITI/v_Ht6uQH8qIf0_seg_50_60.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a stretch of woods. Three figures are on the screen. They are climbing up along a sand track. A grown man is ahead. He wears a short-sleeve white T-shirt and blue jeans and holds a blue bag in his hand. Two boys follow him closely. The boy in the middle wears a short-sleeve grey T-shirt and black trousers. The boy behind him wears a black T-shirt without sleeves and camouflage pants. The camera moves upwards gradually with their steps.\nThen, the camera switches to a low angle, shooting the tops of the trees. The camera moves slightly towards the right. The sky is grey. Then, the camera switches again. This time it switches to a high angle. On the screen is cracked ground. A Daddy Longlegs Spider is moving on the ground.\nThen, the camera switches perspective again, showing that the three figures have already walked out of the woods and come to a spread of vast cracked land. The grown man still walks ahead and the two boys follow him. They keep moving forward.", "events": [ { "event": "The video begins as the camera zooms in on a stretch of woods. Three figures are on the screen. They are climbing up along a sand track. A grown man is ahead. He wears a short-sleeve white T-shirt and blue jeans and holds a blue bag in his hand. Two boys follow him closely. The boy in the middle wears a short-sleeve grey T-shirt and black trousers. The boy behind him wears a black T-shirt without sleeves and camouflage pants. 
The camera moves upwards gradually with their steps.", "visual_elements": [ { "content": "The video begins as the camera zooms in on a stretch of woods.", "type": "camera", "weight": 3 }, { "content": "Three figures are on the screen.", "type": "attribute", "weight": 2 }, { "content": "They are climbing up along a sand track.", "type": "action", "weight": 3 }, { "content": "A grown man is ahead.", "type": "attribute", "weight": 2 }, { "content": "The grown man wears a short-sleeve white T-shirt and blue jeans.", "type": "attribute", "weight": 3 }, { "content": "The grown man holds a blue bag in his hand.", "type": "attribute", "weight": 2 }, { "content": "Two boys follow him closely.", "type": "attribute", "weight": 2 }, { "content": "The boy in the middle wears a short-sleeve grey T-shirt and black trousers.", "type": "attribute", "weight": 3 }, { "content": "The boy behind him wears a black T-shirt without sleeves and camouflage pants.", "type": "attribute", "weight": 3 }, { "content": "The camera moves with their steps.", "type": "camera", "weight": 3 }, { "content": "The camera moves upwards gradually.", "type": "camera", "weight": 3 } ] }, { "event": "Then, the camera switches to a low angle and shoots the tops of the trees. The camera moves slightly towards the right. The sky is grey. Then, the camera switches again. This time it switches to a high angle. On the screen is cracked ground. A Daddy Longlegs Spider is moving on the ground.", "visual_elements": [ { "content": "The camera switches to a low angle and shoots the tops of the trees.", "type": "camera", "weight": 3 }, { "content": "The camera moves slightly towards the right.", "type": "camera", "weight": 3 }, { "content": "The sky is grey.", "type": "scene", "weight": 2 }, { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "This time it switches to a high angle.", "type": "camera", "weight": 3 }, { "content": "On the screen is cracked land.", "type": "attribute", "weight": 2 }, { "content": "A Daddy Longlegs Spider is moving on the ground.", "type": "attribute", "weight": 1 } ] }, { "event": "Then, the camera switches perspective again, showing that the three figures have already walked out of the woods and come to this stretch of vast cracked land. The grown man still walks ahead and the two boys follow him. They keep moving forward.", "visual_elements": [ { "content": "Then, the camera switches perspective again.", "type": "camera", "weight": 3 }, { "content": "It shows that the three figures have already walked out of the woods.", "type": "attribute", "weight": 2 }, { "content": "The three men come to this stretch of vast cracked land.", "type": "attribute", "weight": 2 }, { "content": "The grown man still walks ahead.", "type": "action", "weight": 3 }, { "content": "The two boys follow him.", "type": "action", "weight": 3 }, { "content": "The three keep moving forward.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 24 }, { "index": "TUNA_0284", "video_path": "VELOCITI/v_IFVWE5XHflo_seg_125_135.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a man who wears a black jacket, a pair of sunglasses and a helmet. He has a pistol in his hand. The muzzle points to the front. In the background is a shelf. Some glasses are put on it. 
The light is dim.\nThen, the camera switches to a man who wears long-sleeve grey clothes. He holds a basket loaded with bottles, his hands shaking. He and the man in black stand face to face.\nThen the camera switches back to the man in black. His left hand, which holds the pistol, moves down slowly. Suddenly the man in black moves the pistol upwards towards the man in long-sleeve grey clothes and shoots him. The camera switches to the perspective of the man in black. The man in long-sleeve grey clothes is shot twice in the chest. He spreads his arms and his body falls down backwards along some piled cartons on the ground. The camera finally zooms in on the back of the man in black leaving the house through the corridor.", "events": [ { "event": "The video begins as the camera zooms in on a man who wears a black jacket, a pair of sunglasses and a helmet. He has a pistol in his hand. The muzzle points to the front. In the background is a shelf. Some glasses are put on it. The light is dim.", "visual_elements": [ { "content": "The video begins as the camera zooms in on a man who wears a black jacket, a pair of sunglasses and a helmet.", "type": "camera", "weight": 3 }, { "content": "He has a gun in his hand.", "type": "attribute", "weight": 2 }, { "content": "The muzzle points to the front.", "type": "attribute", "weight": 2 }, { "content": "In the background is a shelf.", "type": "scene", "weight": 2 }, { "content": "Some glasses are put on the shelf.", "type": "attribute", "weight": 2 }, { "content": "The light is dim.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to a man who wears long-sleeve grey clothes. He holds a basket loaded with bottles, his hands shaking. He and the man in black stand face to face.", "visual_elements": [ { "content": "The camera switches to a man who wears long-sleeve grey clothes.", "type": "camera", "weight": 3 }, { "content": "The man in long-sleeve grey clothes holds a basket loaded with bottles, his hands shaking.", "type": "attribute", "weight": 3 }, { "content": "The man in long-sleeve grey clothes and the man in black stand face to face.", "type": "attribute", "weight": 2 } ] }, { "event": "Then the camera switches back to the man in black. His left hand, which holds the pistol, moves down slowly. Suddenly the man in black moves the pistol upwards towards the man in long-sleeve grey clothes and shoots him. The camera switches to the perspective of the man in black. The man in long-sleeve grey clothes is shot twice in the chest. He spreads his arms and his body falls down backwards along some piled cartons on the ground. 
The camera finally zooms in on the back of the man in black leaving the house through the corridor.", "visual_elements": [ { "content": "Then the camera switches back to the man in black.", "type": "camera", "weight": 3 }, { "content": "The left hand of the man in black, which holds the pistol, moves down slowly.", "type": "action", "weight": 3 }, { "content": "Suddenly the man in black moves the pistol upwards towards the man in long-sleeve grey clothes and shoots him.", "type": "action", "weight": 3 }, { "content": "The camera switches to the perspective of the man in black.", "type": "camera", "weight": 3 }, { "content": "The man in long-sleeve grey clothes is shot twice in the chest.", "type": "attribute", "weight": 2 }, { "content": "The man in long-sleeve grey clothes spreads his arms and his body falls down backwards along some piled cartons on the ground.", "type": "action", "weight": 3 }, { "content": "The camera finally zooms in on the back of the man in black leaving the house through the corridor.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0285", "video_path": "VELOCITI/v_IZQFJ6hZNJc_seg_20_30.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a young man in a black coat and a plaid scarf. He stands in front of the counter of a fast-food restaurant. A menu board and some kitchen equipment can be seen in the background. The camera moves downwards with the man's action of squatting. The man squats beside a seat near a wooden partition.\nThen, the camera switches to another man in a red plaid uniform. He sits near the window with his hands supporting his chin. Streets and passengers can be seen outside the window.\nThe camera switches perspective again and shows a man who wears a grey coat, a dark scarf and a striped woolen hat. He has a shotgun in his hand and stands inside the entrance of the fast-food restaurant. Behind him sits a man in a black coat and a black hat. The streets and some vehicles can be seen outside the window in the background.\nThe camera switches perspective and shows an employee in a red plaid uniform. The man wears a blue apron and his hands are moving.\nThe camera switches back to the man holding the shotgun. He turns around and talks in the restaurant. There are two paintings hanging on the wall in the background.\nThen the camera switches to a boy sitting on the floor. He wears a long white T-shirt with black sleeves. The boy first holds his head with his hands and then covers his ears with his hands.", "events": [ { "event": "The video begins as the camera zooms in on a young man in a black coat and a plaid scarf. He stands in front of the counter of a fast-food restaurant. A menu board and some kitchen equipment can be seen in the background. The camera moves downwards with the man's action of squatting. 
The man squats beside a seat near a wooden partition.", "visual_elements": [ { "content": "The video begins as the camera zooms in on a young man in a black coat and a plaid scarf.", "type": "camera", "weight": 3 }, { "content": "The man stands in front of the counter of a fast-food restaurant.", "type": "attribute", "weight": 2 }, { "content": "A menu board and some kitchen equipment can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "The camera moves downwards with the man's action of squatting.", "type": "camera", "weight": 3 }, { "content": "The man squats beside a seat near a wooden partition.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera switches to another man in a red plaid uniform. He sits near the window with his hands supporting his chin. Streets and passengers can be seen outside the window.", "visual_elements": [ { "content": "The camera switches to another man in a red plaid uniform.", "type": "camera", "weight": 3 }, { "content": "The man sits near the window with his hands supporting his chin.", "type": "action", "weight": 3 }, { "content": "Streets and passengers can be seen outside the window.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches perspective again and shows a man who wears a grey coat, a dark scarf and a striped woolen hat. He has a shotgun in his hand and stands inside the entrance of the fast-food restaurant. Behind him sits a man in a black coat and a black hat. The streets and some vehicles can be seen outside the window in the background.", "visual_elements": [ { "content": "The camera switches perspective again.", "type": "camera", "weight": 3 }, { "content": "It shows a man who wears a grey coat, a dark scarf and a striped woolen hat.", "type": "attribute", "weight": 2 }, { "content": "He has a shotgun in his hand.", "type": "attribute", "weight": 3 }, { "content": "The man stands inside the entrance of the fast-food restaurant.", "type": "attribute", "weight": 1 }, { "content": "Behind the man with a shotgun in his hands sits a man in a black coat and a black hat.", "type": "attribute", "weight": 2 }, { "content": "The streets and some vehicles can be seen outside the window in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches perspective and shows an employee in a red plaid uniform. The man wears a blue apron and his hands are moving.", "visual_elements": [ { "content": "The camera switches perspective.", "type": "camera", "weight": 3 }, { "content": "The camera shows an employee in a red plaid uniform.", "type": "attribute", "weight": 2 }, { "content": "The man wears a blue apron.", "type": "attribute", "weight": 2 }, { "content": "The man's hands are moving.", "type": "action", "weight": 1 } ] }, { "event": "The camera switches back to the man holding the shotgun. He turns around and talks in the restaurant. There are two paintings hanging on the wall in the background.", "visual_elements": [ { "content": "The camera switches back to the man holding the shotgun.", "type": "camera", "weight": 3 }, { "content": "The man holding the shotgun turns around and talks in the restaurant.", "type": "action", "weight": 3 }, { "content": "There are two paintings hanging on the wall in the background.", "type": "scene", "weight": 2 } ] }, { "event": "Then the camera switches to a boy sitting on the floor. He wears a long white T-shirt with black sleeves. 
The boy first holds his head with his hands and then covers his ears with his hands.", "visual_elements": [ { "content": "Then the camera switches to a boy sitting on the floor.", "type": "camera", "weight": 3 }, { "content": "The boy wears a long white T-shirt with black sleeves.", "type": "attribute", "weight": 2 }, { "content": "The boy first holds his head with his hands.", "type": "action", "weight": 3 }, { "content": "The boy then covers his ears with his hands.", "type": "action", "weight": 2 } ] } ], "n_events": 6, "n_elements": 25 }, { "index": "TUNA_0286", "video_path": "VELOCITI/v_IZQFJ6hZNJc_seg_75_85.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a man who wears a grey trenchcoat and a colored striped hat. He stands in front of the counter of a fast-food restaurant. The menus and signs of the restaurant can be seen in the background. Then, the camera switches to another man in a red plaid uniform. The window of the restaurant and a poster on the wall are in the background.\nThen the camera switches to a shot of the two men's confrontation. The man in the grey trenchcoat holds a shotgun in his hands. The man in the red plaid uniform hits the shotgun sideways with a stick in both hands. The camera shifts quickly. Another man in a red plaid uniform jumps over the counter rapidly.\nThen, the camera switches to a shot of the two men's close-range fight from the beginning. The man in the red uniform knocks the man in the grey trenchcoat down hard with the stick. The shotgun falls on the floor.\nThe camera closes up on the shotgun fallen on the ground. Then the man in the red plaid uniform who jumped over the counter picks up the shotgun immediately. The camera moves upwards with his action.\nThe camera switches again to the man in the grey trenchcoat. He lies on the floor and looks to the left. A knife is held in his right hand. The camera returns to the man in the red plaid uniform who holds the shotgun. He stands behind the counter, points the shotgun at the man lying on the floor and at the same time is talking.\nAt last, the camera switches again to the man in the grey trenchcoat. He is still lying on the floor, and then he turns his head and looks up to the right.", "events": [ { "event": "The video begins as the camera zooms in on a man who wears a grey trenchcoat and a colored striped hat. He stands in front of the counter of a fast-food restaurant. The menus and signs of the restaurant can be seen in the background. Then, the camera switches to another man in a red plaid uniform. The window of the restaurant and a poster on the wall are in the background.", "visual_elements": [ { "content": "The camera zooms in on a man who wears a grey trenchcoat and a colored striped hat.", "type": "camera", "weight": 3 }, { "content": "The man stands in front of the counter of a fast-food restaurant.", "type": "attribute", "weight": 2 }, { "content": "The menus and signs of the restaurant can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "The camera switches to another man in a red plaid uniform.", "type": "camera", "weight": 3 }, { "content": "The window of the restaurant and a poster on the wall are in the background.", "type": "scene", "weight": 1 } ] }, { "event": "Then the camera switches to a shot of the two men's confrontation. 
The man in the grey trenchcoat holds a shotgun in his hands. The man in the red plaid uniform hits the shotgun sideways with a stick in both hands. The camera shifts quickly. Another man in a red plaid uniform jumps over the counter rapidly.", "visual_elements": [ { "content": "The camera switches to a shot of the two men's confrontation.", "type": "camera", "weight": 3 }, { "content": "The man in the grey trenchcoat holds a shotgun in his hands.", "type": "action", "weight": 3 }, { "content": "The man in the red plaid uniform hits the shotgun sideways with a stick in both hands.", "type": "action", "weight": 3 }, { "content": "The camera shifts quickly.", "type": "camera", "weight": 3 }, { "content": "Another man in a red plaid uniform jumps over the counter rapidly.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera switches to a shot of the two men's close-range fight from the beginning. The man in the red uniform knocks the man in the grey trenchcoat down hard with the stick. The shotgun falls on the floor.", "visual_elements": [ { "content": "The camera switches to a shot of the two men's close-range fight from the beginning.", "type": "camera", "weight": 3 }, { "content": "The man in the red uniform knocks the man in the grey trenchcoat down hard with the stick.", "type": "action", "weight": 3 }, { "content": "The shotgun falls on the floor.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera closes up on the shotgun fallen on the ground. Then the man in the red plaid uniform who jumped over the counter picks up the shotgun immediately. The camera moves upwards with his action.", "visual_elements": [ { "content": "The camera closes up on the shotgun fallen on the ground.", "type": "camera", "weight": 3 }, { "content": "The man in the red plaid uniform who jumped over the counter picks up the shotgun immediately.", "type": "action", "weight": 3 }, { "content": "The camera moves upwards with his action.", "type": "camera", "weight": 3 } ] }, { "event": "The camera switches again to the man in the grey trenchcoat. He lies on the floor and looks to the left. A knife is held in his right hand. The camera returns to the man in the red plaid uniform who holds the shotgun. He stands behind the counter, points the shotgun at the man lying on the floor and at the same time is talking.", "visual_elements": [ { "content": "The camera switches again to the man in the grey trenchcoat.", "type": "camera", "weight": 3 }, { "content": "The man lies on the floor and looks to the left.", "type": "action", "weight": 2 }, { "content": "A knife is held in his right hand.", "type": "attribute", "weight": 3 }, { "content": "The camera returns to the man in the red plaid uniform who holds the shotgun.", "type": "camera", "weight": 3 }, { "content": "The man in the red plaid uniform stands behind the counter.", "type": "attribute", "weight": 1 }, { "content": "The man points the shotgun at the man lying on the floor.", "type": "action", "weight": 3 }, { "content": "The man in the red plaid uniform is talking at the same time.", "type": "action", "weight": 2 } ] }, { "event": "At last, the camera switches again to the man in the grey trenchcoat. 
He is still lying on the floor, and then he turns his head and looks up to the right.", "visual_elements": [ { "content": "The camera switches again to the man in the grey trenchcoat.", "type": "camera", "weight": 3 }, { "content": "The man in the grey trenchcoat is still lying on the floor.", "type": "action", "weight": 2 }, { "content": "Then the man in the grey trenchcoat turns his head and looks up to the right.", "type": "action", "weight": 3 } ] } ], "n_events": 6, "n_elements": 26 }, { "index": "TUNA_0287", "video_path": "VELOCITI/v_JBLwuC2gHVQ_seg_85_95.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a blonde woman and a brown-haired woman. The blonde woman wears light-colored clothes, and the brown-haired woman wears a pink camisole. A stretch of green plants is in the background. The sky is bluish-green.\nThen, the camera switches perspective and shows more figures. A man with curly hair and a short-haired man appear on the screen. The curly-haired man wears a short-sleeve beige shirt, and the short-haired man wears a pale blue shirt.\nThe camera switches shots. The brown-haired woman and the curly-haired man support the blonde woman as she bends towards the ground. Then the camera moves to the right. The short-haired man holds a transparent bottle. The bottle contains some yellow liquid.\nThen, the camera switches again and shows the short-haired man pouring the liquid in the bottle on the body of the blonde woman. The blonde woman lowers her head and the brown-haired woman beside her also lowers her head to comfort the blonde woman.\nThe camera continues to switch shots and shows that the short-haired man and the curly-haired man are together. The short-haired man has a knife in his hand and there is blood on the knife.", "events": [ { "event": "The video begins as the camera zooms in on a blonde woman and a brown-haired woman. The blonde woman wears light-colored clothes, and the brown-haired woman wears a pink camisole. A stretch of green plants is in the background. The sky is bluish-green.", "visual_elements": [ { "content": "The camera zooms in on a blonde woman and a brown-haired woman.", "type": "camera", "weight": 3 }, { "content": "The blonde woman wears light-colored clothes.", "type": "attribute", "weight": 1 }, { "content": "The brown-haired woman wears a pink camisole.", "type": "attribute", "weight": 2 }, { "content": "A stretch of green plants is in the background.", "type": "scene", "weight": 2 }, { "content": "The sky is bluish-green.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches perspective and shows more figures. A man with curly hair and a short-haired man appear on the screen. 
The curly-haired man wears a short-sleeve beige shirt, and the short-haired man wears a pale blue shirt.", "visual_elements": [ { "content": "The camera switches perspective.", "type": "camera", "weight": 3 }, { "content": "The camera shows more figures.", "type": "attribute", "weight": 2 }, { "content": "A man with curly hair and a short-haired man appear on the screen.", "type": "attribute", "weight": 2 }, { "content": "The curly-haired man wears a short-sleeve beige shirt.", "type": "attribute", "weight": 2 }, { "content": "The short-haired man wears a pale blue shirt.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches shots. The brown-haired woman and the curly-haired man support the blonde woman as she bends towards the ground. Then the camera moves to the right. The short-haired man holds a transparent bottle. The bottle contains some yellow liquid.", "visual_elements": [ { "content": "The camera switches shots.", "type": "camera", "weight": 3 }, { "content": "The brown-haired woman and the curly-haired man support the blonde woman as she bends towards the ground.", "type": "action", "weight": 3 }, { "content": "Then the camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The short-haired man holds a transparent bottle.", "type": "action", "weight": 3 }, { "content": "The bottle contains some yellow liquid.", "type": "attribute", "weight": 3 } ] }, { "event": "Then, the camera switches again and shows the short-haired man pouring the liquid in the bottle on the body of the blonde woman. The blonde woman lowers her head and the brown-haired woman beside her also lowers her head to comfort the blonde woman.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "The camera shows the short-haired man pouring the liquid in the bottle on the body of the blonde woman.", "type": "action", "weight": 3 }, { "content": "The blonde woman lowers her head.", "type": "attribute", "weight": 2 }, { "content": "The brown-haired woman beside her also lowers her head to comfort the blonde woman.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to switch shots and shows that the short-haired man and the curly-haired man are together. The short-haired man has a knife in his hand and there is blood on the knife.", "visual_elements": [ { "content": "The camera continues to switch.", "type": "camera", "weight": 3 }, { "content": "It shows that the short-haired man and the curly-haired man are together.", "type": "attribute", "weight": 2 }, { "content": "The short-haired man has a knife in his hand.", "type": "action", "weight": 3 }, { "content": "There is blood on the knife.", "type": "attribute", "weight": 2 } ] } ], "n_events": 5, "n_elements": 23 }, { "index": "TUNA_0288", "video_path": "VELOCITI/v_JZtErr7VLKE_seg_85_95.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a close-up of a man's face. He wears a red coat and a yellow shirt. The background is blurred. Then the camera switches to an outdoor scene. A man in a pale blue suit opens the left front door of a black-and-white police car, and sits on the driver's seat. There is a building with the letters \"BUS\" on it in the background.\nThen, the camera switches to another driving police car. 
The man in the car wears a dark suit, a hat, black sunglasses and gloves. He sits on the driver's seat with his hands on the steering wheel. He looks out of the window. The camera shoots from the car window.\nThen, the camera switches to the first police car. The man in the pale blue suit sits on the driver's seat, and he waves outwards from the car window. Then the police car drives to the left of the screen. There are the letters \"BAKER\" and \"WE SERVE AND PROTECT\" printed on the police car. At the same time, another police car drives to the right of the screen. Finally, the screen freezes on the letters \"SHER\" on the car.\nThen, the camera switches to the inside of a driving car. The man inside the car wears a pink jacket. His hands hold the steering wheel and he looks straight ahead. The background is a stretch of desolate desert. The car drives on the road. The camera shoots sideways at the man's profile and his action of driving.", "events": [ { "event": "The video begins as the camera zooms in on a close-up of a man's face. He wears a red coat and a yellow shirt. The background is blurred. Then the camera switches to an outdoor scene. A man in a pale blue suit opens the left front door of a black-and-white police car, and sits on the driver's seat. There is a building with the letters \"BUS\" on it in the background.", "visual_elements": [ { "content": "The camera zooms in on a close-up of a man's face.", "type": "camera", "weight": 3 }, { "content": "The man wears a red coat and a yellow shirt.", "type": "attribute", "weight": 1 }, { "content": "The background is blurred.", "type": "scene", "weight": 1 }, { "content": "Then the camera switches to an outdoor scene.", "type": "camera", "weight": 3 }, { "content": "A man in a pale blue suit opens the left front door of a black-and-white police car.", "type": "action", "weight": 3 }, { "content": "The man in the pale blue suit sits on the driver's seat.", "type": "action", "weight": 3 }, { "content": "There is a building with the letters \"BUS\" on it in the background.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to another driving police car. The man in the car wears a dark suit, a hat, black sunglasses and gloves. He sits on the driver's seat with his hands on the steering wheel. He looks out of the window. The camera shoots from the car window.", "visual_elements": [ { "content": "The camera switches to another driving police car.", "type": "camera", "weight": 3 }, { "content": "The man in the car wears a dark suit and a dark hat.", "type": "attribute", "weight": 2 }, { "content": "The man in the car wears black sunglasses and gloves.", "type": "attribute", "weight": 2 }, { "content": "The man sits on the driver's seat with his hands on the steering wheel.", "type": "attribute", "weight": 1 }, { "content": "The car window has no glass.", "type": "attribute", "weight": 1 }, { "content": "The man looks out of the window.", "type": "action", "weight": 2 }, { "content": "The camera shoots from the car window.", "type": "camera", "weight": 3 } ] }, { "event": "Then, the camera switches to the first police car. The man in the pale blue suit sits on the driver's seat, and he waves outwards from the car window. Then the police car drives to the left of the screen. There are the letters \"BAKER\" and \"WE SERVE AND PROTECT\" printed on the police car. At the same time, another police car drives to the right of the screen. 
Finally, the screen freezes on the letters \"SHER\" on the car.", "visual_elements": [ { "content": "The camera switches to the first police car.", "type": "camera", "weight": 3 }, { "content": "The man in the pale blue suit sits on the driver's seat.", "type": "attribute", "weight": 2 }, { "content": "The man in the pale blue suit waves outwards from the car window.", "type": "action", "weight": 3 }, { "content": "The police car drives to the left of the screen.", "type": "attribute", "weight": 2 }, { "content": "There are the letters \"BAKER\" and \"WE SERVE AND PROTECT\" printed on the police car.", "type": "attribute", "weight": 2 }, { "content": "Another police car drives to the right of the screen.", "type": "attribute", "weight": 2 }, { "content": "The screen freezes on the letters \"SHER\" on the car.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera switches to the inside of a driving car. The man inside the car wears a pink jacket. His hands hold the steering wheel and he looks straight ahead. The background is a stretch of desolate desert. The car drives on the road. The camera shoots sideways at the man's profile and his action of driving.", "visual_elements": [ { "content": "The camera switches to the inside of a driving car.", "type": "camera", "weight": 3 }, { "content": "The man inside the car wears a pink jacket.", "type": "attribute", "weight": 2 }, { "content": "The man's hands hold the steering wheel.", "type": "action", "weight": 3 }, { "content": "The man looks straight ahead.", "type": "action", "weight": 2 }, { "content": "The background is a stretch of desolate desert.", "type": "scene", "weight": 2 }, { "content": "The car drives on the road.", "type": "attribute", "weight": 1 }, { "content": "The camera shoots sideways at the man's profile and his action of driving.", "type": "camera", "weight": 3 } ] } ], "n_events": 4, "n_elements": 28 }, { "index": "TUNA_0289", "video_path": "VELOCITI/v_JcAdeY9KlpE_seg_100_110.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on the bottom of a white metal table. The table legs are entangled with a brown leather belt. A red-brick floor and several steps can be seen in the background. Then, the camera switches to a woman who wears a brown coat with white collars. She wears a pair of gold earrings and a necklace. A white fence and some green plants are in the background. Her hands are moving.\nAfter that, the camera switches again and shows a man who wears a grey suit, a white shirt and a striped tie. He sits beside the table. Several cups of lemon juice and some papers are on the table. His hands wave in the air. Then he lowers his head to write on the paper. Then, a big dog runs towards him from the left of the screen. It shoves the man's writing arm with its head and wags its tail. The man strokes the head of the dog with his hand. Then the dog's head is pushed down by the man. Several flowerpots and some plants can be seen in the background.\nThe camera switches again and shows a man who wears a black suit, a light-colored striped shirt and a striped tie. He lifts his right hand with which he holds a little ball. He talks as he spins the little ball.\nAt last, the camera switches to a woman in a black dress. She is on the right behind the man in the grey suit. 
She runs out of the house with a piece of yellow cloth in her right hand. She walks towards the table.", "events": [ { "event": "The video begins as the camera zooms in on the bottom of a white metal table. The table legs are entangled with a brown leather belt. A red-brick floor and several steps can be seen in the background. Then, the camera switches to a woman who wears a brown coat with white collars. She wears a pair of gold earrings and a necklace. A white fence and some green plants are in the background. Her hands are moving.", "visual_elements": [ { "content": "The camera zooms in on the bottom of a white metal table.", "type": "camera", "weight": 3 }, { "content": "The table legs are entangled with a brown leather belt.", "type": "attribute", "weight": 2 }, { "content": "A red-brick floor and several steps can be seen in the background.", "type": "attribute", "weight": 2 }, { "content": "The camera switches to a woman who wears a brown coat with white collars.", "type": "camera", "weight": 3 }, { "content": "The woman wears a pair of gold earrings and a necklace.", "type": "attribute", "weight": 2 }, { "content": "A white fence and some green plants are in the background.", "type": "scene", "weight": 2 }, { "content": "The woman's hands are moving.", "type": "action", "weight": 3 } ] }, { "event": "After that, the camera switches again and shows a man who wears a grey suit, a white shirt and a striped tie. He sits beside the table. Several cups of lemon juice and some papers are on the table. His hands wave in the air. Then he lowers his head to write on the paper. Then, a big dog runs towards him from the left of the screen. It shoves the man's writing arm with its head and wags its tail. The man strokes the head of the dog with his hand. Then the dog's head is pushed down by the man. Several flowerpots and some plants can be seen in the background.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "The camera shows a man who wears a grey suit, a white shirt and a striped tie.", "type": "attribute", "weight": 3 }, { "content": "The man sits beside the table.", "type": "attribute", "weight": 2 }, { "content": "Several cups of lemon juice and some papers are on the table.", "type": "attribute", "weight": 2 }, { "content": "The man's hands wave in the air.", "type": "action", "weight": 3 }, { "content": "The man lowers his head to write on the paper.", "type": "action", "weight": 3 }, { "content": "A big dog runs towards him from the left of the screen.", "type": "action", "weight": 3 }, { "content": "The dog shoves the man's writing arm with its head.", "type": "action", "weight": 2 }, { "content": "The big dog wags its tail.", "type": "action", "weight": 3 }, { "content": "The man strokes the head of the dog with his hand.", "type": "action", "weight": 2 }, { "content": "The dog's head is pushed down by the man.", "type": "action", "weight": 1 }, { "content": "Several flowerpots and some plants can be seen in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches again and shows a man who wears a black suit, a light-colored striped shirt and a striped tie. He lifts his right hand with which he holds a little ball. 
He talks as he spins the little ball.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "The camera shows a man who wears a black suit, a light-colored striped shirt and a striped tie.", "type": "attribute", "weight": 2 }, { "content": "The man lifts his right hand with which he holds a little ball.", "type": "action", "weight": 3 }, { "content": "The man talks as he spins the little ball.", "type": "action", "weight": 2 } ] }, { "event": "At last, the camera switches to a woman in a black dress. She is on the right behind the man in the grey suit. She runs out of the house with a piece of yellow cloth in her right hand. She walks towards the table.", "visual_elements": [ { "content": "The camera switches to a woman in a black dress.", "type": "camera", "weight": 3 }, { "content": "The woman is on the right behind the man in the grey suit.", "type": "attribute", "weight": 2 }, { "content": "The woman runs out of the house.", "type": "action", "weight": 3 }, { "content": "The woman has a piece of yellow cloth in her right hand.", "type": "attribute", "weight": 3 }, { "content": "The woman walks towards the table.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 28 }, { "index": "TUNA_0290", "video_path": "VELOCITI/v_JcAdeY9KlpE_seg_60_70.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a man and a woman. They sit beside the dining table outdoors. The man wears a dark suit and a light-colored striped shirt. He wears a necktie. The woman has short hair and she wears a brown coat with white collars. The woman wears a pair of pearl earrings and a necklace. The man wears a watch on his left wrist and holds a little blue ball in his right hand. The woman crosses her hands on the table. Several cups of drinks and some dining ware are on the table. A stretch of green lawn and some trees are in the background. There are a house, a white fence and green lounges far away.\nThen, the camera switches to another man and another woman. The man wears a grey suit and a necktie, and he sits near the table with a pen in his hand. The woman stands on the right of the man. She wears a black dress with red and light green patterns on it. A light blue house is in the background. Some flowerpots and plants are in front of the house. The man looks up at the woman. The woman puts a hand near her face. They are talking during this period.\nThe camera switches back to the man and the woman who appeared at first. The woman's left hand knocks over a glass of juice on the table. She wears a watch on the wrist and a ring on her finger. Then the camera moves upwards and closes up on the woman's expression and actions. She first puts her palms together, then says sorry with her hands crossed in front of her chest.\nThen, the camera switches to the standing woman. The camera closes up on the woman's action. She talks as she walks towards the table.", "events": [ { "event": "The video begins as the camera zooms in on a man and a woman. They sit beside the dining table outdoors. The man wears a dark suit and a light-colored striped shirt. He wears a necktie. The woman has short hair and she wears a brown coat with white collars. The woman wears a pair of pearl earrings and a necklace. 
The man wears a watch on his left wrist and holds a little blue ball in his right hand. The woman crosses her hands on the table. Several cups of drinks and some dining ware are on the table. A stretch of green lawn and some trees are in the background. There are a house, a white fence and green lounges far away.", "visual_elements": [ { "content": "The camera zooms in on a man and a woman.", "type": "camera", "weight": 3 }, { "content": "The man and the woman sit beside the dining table outdoors.", "type": "attribute", "weight": 3 }, { "content": "The man wears a dark suit and a light-colored striped shirt.", "type": "attribute", "weight": 3 }, { "content": "The man wears a necktie.", "type": "attribute", "weight": 3 }, { "content": "The woman has short hair and she wears a brown coat.", "type": "attribute", "weight": 3 }, { "content": "The coat has white collars.", "type": "attribute", "weight": 2 }, { "content": "The woman wears a pair of pearl earrings and a necklace.", "type": "attribute", "weight": 2 }, { "content": "The man wears a watch on his left wrist.", "type": "attribute", "weight": 1 }, { "content": "The man holds a little blue ball in his right hand.", "type": "attribute", "weight": 1 }, { "content": "The woman crosses her hands on the table.", "type": "action", "weight": 2 }, { "content": "Several cups of drinks and some dining ware are on the table.", "type": "attribute", "weight": 2 }, { "content": "A stretch of green lawn and some trees are in the background.", "type": "scene", "weight": 2 }, { "content": "There are a house, a white fence and green lounges far away.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to another man and another woman. The man wears a grey suit and a necktie, and he sits near the table with a pen in his hand. The woman stands on the right of the man. She wears a black dress with red and light green patterns on it. A light blue house is in the background. Some flowerpots and plants are in front of the house. The man looks up at the woman. The woman puts a hand near her face. They are talking during this period.", "visual_elements": [ { "content": "The camera switches to another man and another woman.", "type": "camera", "weight": 3 }, { "content": "The man sits near the table.", "type": "attribute", "weight": 2 }, { "content": "The man wears a grey suit.", "type": "attribute", "weight": 3 }, { "content": "The man wears a necktie.", "type": "attribute", "weight": 3 }, { "content": "The man has a pen in his hand.", "type": "attribute", "weight": 3 }, { "content": "The woman stands on the right of the man.", "type": "attribute", "weight": 3 }, { "content": "The woman wears a black dress.", "type": "attribute", "weight": 3 }, { "content": "The dress has red and light green patterns on it.", "type": "attribute", "weight": 3 }, { "content": "A light blue house is in the background.", "type": "scene", "weight": 2 }, { "content": "Some flowerpots and plants are in front of the house.", "type": "scene", "weight": 2 }, { "content": "The man looks up at the woman.", "type": "action", "weight": 2 }, { "content": "The woman puts a hand near her face.", "type": "action", "weight": 2 }, { "content": "The man and the woman are talking during this period.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches back to the man and the woman who appeared at first. The woman's left hand knocks over a glass of juice on the table. She wears a watch on the wrist and a ring on her finger. 
Then the camera moves upwards and closes up on the woman's expression and actions. She first puts her palms together, then says sorry with her hands crossed in front of her chest.", "visual_elements": [ { "content": "The camera switches back to the man and the woman who appeared at first.", "type": "camera", "weight": 3 }, { "content": "The woman's left hand knocks over a glass of juice on the table.", "type": "action", "weight": 3 }, { "content": "The woman wears a watch on the wrist and a ring on her finger.", "type": "attribute", "weight": 2 }, { "content": "The camera moves upwards.", "type": "camera", "weight": 3 }, { "content": "The camera closes up on the woman's expression and actions.", "type": "camera", "weight": 3 }, { "content": "The woman first puts her palms together, then crosses her hands in front of her chest.", "type": "action", "weight": 3 }, { "content": "The woman says sorry.", "type": "action", "weight": 2 } ] }, { "event": "Then, the camera switches to the standing woman. The camera closes up on the woman's action. She talks as she walks towards the table.", "visual_elements": [ { "content": "The camera switches to the standing woman.", "type": "camera", "weight": 3 }, { "content": "The camera closes up on the woman's action.", "type": "camera", "weight": 3 }, { "content": "She talks as she walks towards the table.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 36 }, { "index": "TUNA_0291", "video_path": "VELOCITI/v_JcAdeY9KlpE_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera focuses on a blonde woman. She wears a black dress with a floral pattern. The background is the outer wall of a house, with a brass lamp hanging on the wall and a white door frame next to it. Then, the camera switches to shoot from the back of the blonde. It can be seen that she bends down to clear the cup. Another woman appears opposite her. She sits at a table in the open air. She wears a light brown coat and clothes with white collars inside the coat. She wears gold earrings and a necklace. Then she points down to the right. On the right of the woman in the light brown coat sits a man in a dark suit. The background is the courtyard of a house, surrounded by green plants and brick walls.\nThen, the camera switches to a close-up of a big dog. The dog is brown and white in color, and it wears a brown collar around its neck. The dog wags its tail and walks around the table. The camera continues to follow the dog's action and turn right. On the screen, the leash on the dog's collar is wrapped between the table and the chair legs. The woman who sits on this chair shows a foot wearing high heels. There is a brown suitcase with metal fasteners on the ground next to the chair.\nThen, the camera switches back to the blonde woman, who is standing next to a man in a gray suit. The man sits at the table with a pen in his hand. The blonde woman holds a broken glass and a yellow object. The man in the gray suit looks up at the blonde woman, and then looks down at the pen and paper in his hand. There are flowerpots and green plants in the background.", "events": [ { "event": "The video begins as the camera focuses on a blonde woman. She wears a black dress with a floral pattern. The background is the outer wall of a house, with a brass lamp hanging on the wall and a white door frame next to it. 
Then, the camera switches to shoot from the back of the blonde. It can be seen that she bends down to clear the cup. Another woman appears opposite her. She sits at a table in the open air. She wears a light brown coat and clothes with white collars inside the coat. She wears gold earrings and a necklace. Then she points down to the right. On the right of the woman in the light brown coat sits a man in a dark suit. The background is the courtyard of a house, surrounded by green plants and brick walls.", "visual_elements": [ { "content": "The video begins as the camera focuses on a blonde woman.", "type": "camera", "weight": 3 }, { "content": "She wears a black dress with a floral pattern.", "type": "attribute", "weight": 2 }, { "content": "The background is the outer wall of a house.", "type": "scene", "weight": 2 }, { "content": "A brass lamp hangs on the wall.", "type": "scene", "weight": 1 }, { "content": "A white door frame is next to it.", "type": "scene", "weight": 2 }, { "content": "The camera switches to shoot from the back of the blonde.", "type": "camera", "weight": 3 }, { "content": "It can be seen that she bends down to clear the cup.", "type": "action", "weight": 3 }, { "content": "Another woman appears opposite her.", "type": "attribute", "weight": 2 }, { "content": "She sits at a table in the open air.", "type": "attribute", "weight": 2 }, { "content": "She wears a light brown coat.", "type": "attribute", "weight": 3 }, { "content": "She wears clothes with white collars inside the coat.", "type": "attribute", "weight": 2 }, { "content": "She wears gold earrings and a necklace.", "type": "attribute", "weight": 2 }, { "content": "Then she points down to the right.", "type": "action", "weight": 2 }, { "content": "On the right of the woman in the light brown coat sits a man in a dark suit.", "type": "attribute", "weight": 2 }, { "content": "The background is the courtyard of a house.", "type": "scene", "weight": 1 }, { "content": "Green plants and brick walls surround it.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to a close-up of a big dog. The dog is brown and white in color, and it wears a brown collar around its neck. The dog wags its tail and walks around the table. The camera continues to follow the dog's action and turn right. On the screen, the leash on the dog's collar is wrapped between the table and the chair legs. The woman who sits on this chair shows a foot wearing high heels. 
There is a brown suitcase with metal fasteners on the ground next to the chair.", "visual_elements": [ { "content": "Then, the camera switches to a close-up of a big dog.", "type": "camera", "weight": 3 }, { "content": "The dog is brown and white in color.", "type": "attribute", "weight": 3 }, { "content": "It wears a brown collar around its neck.", "type": "attribute", "weight": 2 }, { "content": "The dog wags its tail and walks around the table.", "type": "action", "weight": 3 }, { "content": "The camera continues to follow the dog's action and turn right.", "type": "camera", "weight": 3 }, { "content": "On the screen, the leash on the dog's collar is wrapped between the table and the chair legs.", "type": "attribute", "weight": 3 }, { "content": "The woman who sits on this chair shows a foot wearing high heels.", "type": "attribute", "weight": 2 }, { "content": "There is a brown suitcase on the ground next to the chair.", "type": "attribute", "weight": 2 }, { "content": "There are metal fasteners on the suitcase.", "type": "attribute", "weight": 1 } ] }, { "event": "Then, the camera switches back to the blonde woman, who is standing next to a man in a gray suit. The man sits at the table with a pen in his hand. The blonde woman holds a broken glass and a yellow object. The man in the gray suit looks up at the blonde woman, and then looks down at the pen and paper in his hand. There are flowerpots and green plants in the background.", "visual_elements": [ { "content": "Then, the camera switches back to the blonde woman.", "type": "camera", "weight": 3 }, { "content": "The blonde stands next to a man in a gray suit.", "type": "attribute", "weight": 3 }, { "content": "The man sits at the table.", "type": "attribute", "weight": 2 }, { "content": "The man has a pen in his hand.", "type": "attribute", "weight": 2 }, { "content": "The blonde woman holds a broken glass and a yellow object.", "type": "attribute", "weight": 2 }, { "content": "The man in the gray suit looks up at the blonde woman.", "type": "action", "weight": 2 }, { "content": "Then the man in the grey suit looks down at the pen and paper in his hand.", "type": "action", "weight": 2 }, { "content": "There are flowerpots and green plants in the background.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 33 }, { "index": "TUNA_0292", "video_path": "VELOCITI/v_Je4QCA5KCuc_seg_80_90.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a modern office with a city landscape with lots of high buildings in the background. A woman in a gray suit stands on the left side of the picture. She has a folder in her hand. In front of her is a black chair and a small table with files on it. In front of the woman is a man in a black suit with a cup in his left hand.\nThe camera turns right with the two people's steps. The woman sits in a black chair. The man walks to the right of the room. On the right is a telescope placed on a tripod. The man walks to the tripod while talking, and the woman takes a pen to record on the document. The light carpet on the floor can be seen in the background. 
Finally, the camera switches to a close-up of the woman, who sits in the chair and talks while recording.", "events": [ { "event": "The video begins as the camera zooms in on a modern office with a city landscape with lots of high buildings in the background. A woman in a gray suit stands on the left side of the picture. She has a folder in her hand. In front of her is a black chair and a small table with files on it. In front of the woman is a man in a black suit with a cup in his left hand.", "visual_elements": [ { "content": "The video begins as the camera zooms in on a modern office.", "type": "camera", "weight": 3 }, { "content": "A city landscape with lots of high buildings is in the background.", "type": "scene", "weight": 1 }, { "content": "A woman in a gray suit stands on the left side of the picture.", "type": "attribute", "weight": 2 }, { "content": "The woman has a folder in her hand.", "type": "attribute", "weight": 2 }, { "content": "In front of the woman is a black chair and a small table with files on it.", "type": "attribute", "weight": 2 }, { "content": "In front of the woman is a man in a black suit.", "type": "attribute", "weight": 3 }, { "content": "The man has a cup in his left hand.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera turns right with the two people's steps. The woman sits in a black chair. The man walks to the right of the room. On the right is a telescope placed on a tripod. The man walks to the tripod while talking, and the woman takes a pen to record on the document. The light carpet on the floor can be seen in the background. Finally, the camera switches to a close-up of the woman, who sits in the chair and talks while recording.", "visual_elements": [ { "content": "The camera turns right with the two people's steps.", "type": "camera", "weight": 3 }, { "content": "The woman sits in a black chair.", "type": "action", "weight": 3 }, { "content": "The man walks to the right of the room.", "type": "action", "weight": 3 }, { "content": "On the right is a telescope placed on a tripod.", "type": "attribute", "weight": 3 }, { "content": "The man walks to the tripod while talking.", "type": "action", "weight": 3 }, { "content": "The woman takes a pen to record on the document.", "type": "action", "weight": 3 }, { "content": "The light carpet on the floor can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "Finally, the camera switches to a close-up of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman sits in the chair and talks while recording.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0293", "video_path": "VELOCITI/v_Jj6H6tJvRjU_seg_135_145.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, an old brown car appears on the screen. A man in a dark coat and jeans is closing the front passenger door. On the right side of the man is a young man in a brown coat and jeans. He pulls a young woman in a light sweater and brown trousers and runs forward quickly. On the left side of the car, a woman in a brown coat can also be seen running with him. The background is a dense forest with tall trees, and the light is dim.\nThe man pulls the woman's hand and continues to run. The camera follows their movements and captures them passing a big tree. 
The camera continues to move to the right and captures them running towards an old blue car parked in the forest. The man in the brown coat opens the right door of the back seat and drags the woman into the car. Then another woman in a brown coat and another man in a dark coat run into the screen, and they open the front door. They enter the car and close the door. The right door of the back seat is open.", "events": [ { "event": "At the beginning of the video, an old brown car appears on the screen. A man in a dark coat and jeans is closing the front passenger door. On the right side of the man is a young man in a brown coat and jeans. He pulls a young woman in a light sweater and brown trousers and runs forward quickly. On the left side of the car, a woman in a brown coat can also be seen running with him. The background is a dense forest with tall trees, and the light is dim.", "visual_elements": [ { "content": "At the beginning of the video, an old brown car appears on the screen.", "type": "attribute", "weight": 2 }, { "content": "A man in a dark coat and jeans is closing the front passenger door.", "type": "attribute", "weight": 1 }, { "content": "On the right side of the man is a young man in a brown coat and jeans.", "type": "attribute", "weight": 3 }, { "content": "The man in a brown coat and jeans pulls a young woman in a light sweater and brown trousers and runs forward quickly.", "type": "action", "weight": 3 }, { "content": "On the left side of the car, a woman in a brown coat can also be seen running with him.", "type": "action", "weight": 1 }, { "content": "The background is a dense forest with tall trees.", "type": "scene", "weight": 2 }, { "content": "The light is dim.", "type": "scene", "weight": 1 } ] }, { "event": "The man pulls the woman's hand and continues to run. The camera follows their movements and captures them passing a big tree. The camera continues to move to the right and captures them running towards an old blue car parked in the forest. The man in the brown coat opens the right door of the back seat and drags the woman into the car. Then another woman in a brown coat and another man in a dark coat run into the screen, and they open the front door. They enter the car and close the door. 
The right door of the back seat is open.", "visual_elements": [ { "content": "The man pulls the woman's hand and continues to run.", "type": "action", "weight": 3 }, { "content": "The camera follows their movements.", "type": "camera", "weight": 3 }, { "content": "The camera captures them passing a big tree.", "type": "action", "weight": 3 }, { "content": "The camera continues to move to the right.", "type": "camera", "weight": 3 }, { "content": "The camera captures them running towards an old blue car parked in the forest.", "type": "camera", "weight": 3 }, { "content": "The man in the brown coat opens the right door of the back seat and drags the woman into the car.", "type": "action", "weight": 3 }, { "content": "Then another woman in a brown coat and another man in a dark coat run into the screen, and they open the front door.", "type": "action", "weight": 3 }, { "content": "They enter the car and close the door.", "type": "action", "weight": 3 }, { "content": "The right door of the back seat is open.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0294", "video_path": "VELOCITI/v_JmElZmlYkHU_seg_25_35.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, there are two men on the screen. A man in a black suit and a black hat is lying on the grass with a pistol in his hand. Another man, wearing a gray suit, kneels on the ground and grabs the former's shoulders. The background is tall grass.\nThe camera switches to a close-up, and the man in the gray suit grabs the man in the black suit by the neck from behind. The expression of the man in the black suit is painful. His right hand releases the gun to grab the thin rope strangling his neck. Finally, the man in the black suit falls to the ground. During this period, the full figure of the man in the gray suit is shown. He wears black sunglasses and has a beard.\nThen, the camera switches to a farther angle. The man in the gray suit passes by the man in the black suit and climbs to the grassland on the left with a crossbow in his hand. The camera moves with his movements.", "events": [ { "event": "At the beginning of the video, there are two men on the screen. A man in a black suit and a black hat is lying on the grass with a pistol in his hand. Another man, wearing a gray suit, kneels on the ground and grabs the former's shoulders. The background is tall grass.", "visual_elements": [ { "content": "There are two men on the screen.", "type": "attribute", "weight": 2 }, { "content": "A man is in a black suit and a black hat.", "type": "attribute", "weight": 3 }, { "content": "The man in the black suit is lying on the grass with a pistol in his hand.", "type": "action", "weight": 3 }, { "content": "Another man wears a gray suit.", "type": "attribute", "weight": 3 }, { "content": "The man wearing a gray suit kneels on the ground and grabs the former's shoulders.", "type": "action", "weight": 3 }, { "content": "The background is tall grass.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to a close-up, and the man in the gray suit grabs the man in the black suit by the neck from behind. The expression of the man in the black suit is painful. His right hand releases the gun to grab the thin rope strangling his neck. Finally, the man in the black suit falls to the ground. 
During this period, the full figure of the man in the gray suit is shown. He wears black sunglasses and has a beard.", "visual_elements": [ { "content": "The camera switches to a close-up.", "type": "camera", "weight": 3 }, { "content": "The man in the gray suit grabs the man in the black suit by the neck from behind.", "type": "action", "weight": 3 }, { "content": "The expression of the man in the black suit is painful.", "type": "attribute", "weight": 2 }, { "content": "The man in the black suit releases the gun held in his right hand to grab the thin rope strangling his neck.", "type": "action", "weight": 3 }, { "content": "The man in the black suit falls to the ground.", "type": "action", "weight": 3 }, { "content": "The full figure of the man in the gray suit is shown.", "type": "attribute", "weight": 2 }, { "content": "The man in the grey suit wears black sunglasses and has a beard.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera switches to a farther angle. The man in the gray suit passes by the man in the black suit and climbs to the grassland on the left with a crossbow in his hand. The camera moves with his movements.", "visual_elements": [ { "content": "The camera switches to a farther angle.", "type": "camera", "weight": 3 }, { "content": "The man in the gray suit passes by the man in the black suit and climbs to the grassland on the left with a crossbow in his hand.", "type": "action", "weight": 3 }, { "content": "The camera moves with the movement of the man in the grey suit.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0295", "video_path": "VELOCITI/v_JqyCEV_iACo_seg_95_105.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a room with a bookshelf filled with all kinds of books and toys. On the right side of the screen is a little boy sitting on the bed. The little boy wears a plaid shirt. In front of him is a woman in a pink coat and light trousers. Her brown hair hangs freely over her shoulders. Next to her is a boy in a dark coat and light trousers. The boy's hair is curly.\nThe camera moves to the left and the woman walks to the center of the room. She turns to face the boy with curly hair. The woman bends down and picks up the boy and walks quickly towards the bed. The camera moves to the right with her movements. The woman puts the boy down and squats on the ground, talking face to face with the curly-haired boy. The light outside the window is bright.\nThen, the woman stands up, turns around and leaves the room. The camera moves to the left following her backview. Then the camera switches to a close-up, showing the feature of her face. She turns around and closes the door.", "events": [ { "event": "The video begins as the camera zooms in on a room with a bookshelf filled with all kinds of books and toys. On the right side of the screen is a little boy sitting on the bed. The little boy wears a plaid shirt. In front of him is a woman in a pink coat and light trousers. Her brown hair hangs freely over her shoulders. Next to her is a boy in a dark coat and light trousers. 
The boy's hair is curly.", "visual_elements": [ { "content": "The camera zooms in on a room.", "type": "camera", "weight": 3 }, { "content": "A bookshelf is in the room.", "type": "scene", "weight": 2 }, { "content": "The bookshelf is filled with all kinds of books and toys.", "type": "scene", "weight": 2 }, { "content": "On the right side of the screen is a little boy sitting on the bed.", "type": "attribute", "weight": 3 }, { "content": "The little boy wears a plaid shirt.", "type": "attribute", "weight": 2 }, { "content": "In front of the little boy is a woman in a pink coat and light trousers.", "type": "attribute", "weight": 3 }, { "content": "Her brown hair hangs freely over her shoulders.", "type": "attribute", "weight": 2 }, { "content": "Next to the woman is a boy in a dark coat and light trousers.", "type": "attribute", "weight": 3 }, { "content": "The boy's hair is curly.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves to the left and the woman walks to the center of the room. She turns to face the boy with curly hair. The woman bends down and picks up the boy and walks quickly towards the bed. The camera moves to the right with her movements. The woman puts the boy down and squats on the ground, talking face to face with the curly-haired boy. The light outside the window is bright.", "visual_elements": [ { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The woman walks to the center of the room.", "type": "action", "weight": 2 }, { "content": "The woman turns to face the boy with curly hair.", "type": "action", "weight": 3 }, { "content": "The woman bends down and picks up the boy and walks quickly towards the bed.", "type": "action", "weight": 3 }, { "content": "The camera moves to the right with her movements.", "type": "camera", "weight": 3 }, { "content": "The woman puts the boy down.", "type": "action", "weight": 3 }, { "content": "The woman squats on the ground.", "type": "action", "weight": 2 }, { "content": "The woman talks face to face with the curly-haired boy.", "type": "action", "weight": 2 }, { "content": "The light outside the window is bright.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the woman stands up, turns around and leaves the room. The camera moves to the left following her backview. Then the camera switches to a close-up, showing the feature of her face. She turns around and closes the door.", "visual_elements": [ { "content": "Then, the woman stands up.", "type": "action", "weight": 2 }, { "content": "The woman turns around and leaves the room.", "type": "action", "weight": 3 }, { "content": "The camera moves to the left following her backview.", "type": "camera", "weight": 3 }, { "content": "Then the camera switches to a close-up.", "type": "camera", "weight": 3 }, { "content": "The camera shows the feature of the woman's face.", "type": "camera", "weight": 3 }, { "content": "The woman turns around and closes the door.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 24 }, { "index": "TUNA_0296", "video_path": "VELOCITI/v_K0zvX6AGd7Q_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on the door of a yellow taxi. The door is opened. A man in a black suit is preparing to enter the taxi. Behind the man stands a doorman in a uniform and a hat. 
The doorman closes the door for the man after he enters the car. The city streets, bustling pedestrians, and the glass windows of buildings which reflect the sunlight can be seen in the background.\nNext, the camera switches to the screen that a man is sitting in the back seat of a taxi. The car is decorated with Christmas decorations--colored lights and a garland. When the car starts moving, the man's body leans back under the force of inertia. Then the man opens his mouth and makes a surprised expression.\nThen, the camera switches to the front view of the car. There is a small Christmas tree decoration and a crystal ball on the front windshield of the car. Outside, a truck and other vehicles in front of the car can be seen. The truck compartment is open. Then the camera switches to the close-up of the man again. His eyes are widely open and his right hand is raised up to block his face. Then, the camera switches to the driver's back. The driver wears a yellow hat and a gray coat. The driver is driving.\nFinally, the camera switches to the outside of the car and shoots the taxi turning sharply and drifting at a corner. Two people in front of the truck drop their things to avoid the taxi.", "events": [ { "event": "The video begins as the camera zooms in on the door of a yellow taxi. The door is opened. A man in a black suit is preparing to enter the taxi. Behind the man stands a doorman in a uniform and a hat. The doorman closes the door for the man after he enters the car. The city streets, bustling pedestrians, and the glass windows of buildings which reflect the sunlight can be seen in the background.", "visual_elements": [ { "content": "The camera zooms in on the door of a yellow taxi.", "type": "camera", "weight": 3 }, { "content": "The door is opened.", "type": "attribute", "weight": 2 }, { "content": "A man in a black suit is preparing to enter the taxi.", "type": "action", "weight": 3 }, { "content": "Behind the man stands a doorman in a uniform and a hat.", "type": "attribute", "weight": 2 }, { "content": "The doorman closes the door for the man after he enters the car.", "type": "action", "weight": 3 }, { "content": "The city streets and bustling pedestrians can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "The glass windows of the buildings reflect the sunlight.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera switches to the screen that a man is sitting in the back seat of a taxi. The car is decorated with Christmas decorations--colored lights and a garland. When the car starts moving, the man's body leans back under the force of inertia. Then the man opens his mouth and makes a surprised expression.", "visual_elements": [ { "content": "Next, the camera switches to the screen that a man is sitting in the back seat of a taxi.", "type": "camera", "weight": 3 }, { "content": "The car is decorated with Christmas decorations--colored lights and a garland.", "type": "attribute", "weight": 2 }, { "content": "The car starts moving.", "type": "action", "weight": 2 }, { "content": "The man's body leans back under the force of inertia.", "type": "action", "weight": 3 }, { "content": "Then the man opens his mouth.", "type": "action", "weight": 3 }, { "content": "The man makes a surprised expression.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera switches to the front view of the car. There is a small Christmas tree decoration and a crystal ball on the front windshield of the car. 
Outside, a truck and other vehicles in front of the car can be seen. The truck compartment is open. Then the camera switches to the close-up of the man again. His eyes are widely open and his right hand is raised up to block his face. Then, the camera switches to the driver's back. The driver wears a yellow hat and a gray coat. The driver is driving.", "visual_elements": [ { "content": "Then, the camera switches to the front view of the car.", "type": "camera", "weight": 3 }, { "content": "There is a small Christmas tree decoration and a crystal ball on the front windshield of the car.", "type": "attribute", "weight": 2 }, { "content": "Outside, a truck and other vehicles in front of the car can be seen.", "type": "attribute", "weight": 2 }, { "content": "The truck compartment is open.", "type": "attribute", "weight": 1 }, { "content": "Then the camera switches to the close-up of the man again.", "type": "camera", "weight": 3 }, { "content": "The man's eyes are widely open.", "type": "attribute", "weight": 1 }, { "content": "And his right hand is raised up to block his face.", "type": "action", "weight": 3 }, { "content": "The camera switches to the driver's back.", "type": "camera", "weight": 3 }, { "content": "The driver wears a yellow hat.", "type": "attribute", "weight": 2 }, { "content": "The driver wears a gray coat.", "type": "attribute", "weight": 2 }, { "content": "The driver is driving.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera switches to the outside of the car and shoots the taxi turning sharply and drifting at a corner. Two people in front of the truck drop their things to avoid the taxi.", "visual_elements": [ { "content": "The camera switches to the outside of the car.", "type": "camera", "weight": 3 }, { "content": "The camera shoots the taxi turning sharply and drifting at a corner.", "type": "camera", "weight": 3 }, { "content": "Two people in front of the truck drop their things to avoid the taxi.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 27 }, { "index": "TUNA_0297", "video_path": "VELOCITI/v_K0zvX6AGd7Q_seg_110_120.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on the inside of a taxi decorated with Christmas lights. There are two figures in the car. In the front driver's seat, there is a young man in gray clothes, a yellow hat and a gray scarf. In the back seat, there is another young man who wears a dark suit and a tie. He leans on the car seat. The taxi driver puts his hands with black gloves on the steering wheel. From time to time, he controls the steering wheel with his left hand, and his right hand holds a cigar. He turns slightly to talk to the young man in the back seat. The young man sitting in the back seat has been communicating with the driver. The young man in the back seat rubs his eyes with his left hand.\nThe camera switches to the outside of the car. The yellow taxi is driving on the street. There is snow on the side of the street. The houses beside the street are decorated with colorful lights. On the top step of the house on the right side closest to the camera is a decoration of Santa Claus.", "events": [ { "event": "The video begins as the camera zooms in on the inside of a taxi decorated with Christmas lights. There are two figures in the car. 
In the front driver's seat, there is a young man in gray clothes, a yellow hat and a gray scarf. In the back seat, there is another young man who wears a dark suit and a tie. He leans on the car seat. The taxi driver puts his hands with black gloves on the steering wheel. From time to time, he controls the steering wheel with his left hand, and his right hand holds a cigar. He turns slightly to talk to the young man in the back seat. The young man sitting in the back seat has been communicating with the driver. The young man in the back seat rubs his eyes with his left hand.", "visual_elements": [ { "content": "The video begins as the camera zooms in on the inside of a taxi decorated with Christmas lights.", "type": "camera", "weight": 3 }, { "content": "There are two figures in the car.", "type": "attribute", "weight": 2 }, { "content": "In the front driver's seat, there is a young man in gray clothes, a yellow hat and a gray scarf.", "type": "attribute", "weight": 3 }, { "content": "In the back seat, there is another young man who wears a dark suit and a tie.", "type": "attribute", "weight": 3 }, { "content": "The man in the dark suit leans on the car seat.", "type": "attribute", "weight": 2 }, { "content": "The taxi driver puts his hands with black gloves on the steering wheel.", "type": "attribute", "weight": 3 }, { "content": "From time to time, the taxi driver controls the steering wheel with his left hand.", "type": "action", "weight": 3 }, { "content": "The taxi driver's right hand holds a cigar and turns slightly to talk to the young man in the back seat.", "type": "action", "weight": 3 }, { "content": "The young man sitting in the back seat has been communicating with the driver.", "type": "action", "weight": 3 }, { "content": "The young man in the back seat rubs his eyes with his left hand.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to the outside of the car. The yellow taxi is driving on the street. There is snow on the side of the street. The houses beside the street are decorated with colorful lights. On the top step of the house on the right side closest to the camera is a decoration of Santa Claus.", "visual_elements": [ { "content": "The camera switches to the outside of the car,", "type": "camera", "weight": 3 }, { "content": "The yellow taxi is driving on the street.", "type": "attribute", "weight": 2 }, { "content": "There is snow on the side of the street.", "type": "attribute", "weight": 1 }, { "content": "The houses beside the street are decorated with colorful lights.", "type": "attribute", "weight": 2 }, { "content": "On the top step of the house on the right side closest to the camera is a decoration of Santa Claus.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0298", "video_path": "VELOCITI/v_K0zvX6AGd7Q_seg_120_130.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera shows a yellow taxi parked on the street with a sign \"CHECKER\" on the roof. The houses on the right side of the street are decorated with colorful Christmas lights.\nThen, the camera switches to the side of the taxi. The door opens and hot air gushes out from the car. A man in a gray coat and a yellow hat gets out of the car. Then he closes the door of the driver’s seat and opens the door of the rear seat. 
A man in a black suit, a white shirt and a red tie gets out of the car, and the heat also pours out. The man in the black suit waves his hand and takes a deep breath. Then they stand by the car and look at the distance. The camera shoots forward from the right.", "events": [ { "event": "At the beginning of the video, the camera shows a yellow taxi parked on the street with a sign \"CHECKER\" on the roof. The houses on the right side of the street are decorated with colorful Christmas lights.", "visual_elements": [ { "content": "The camera shows a yellow taxi parked on the street.", "type": "camera", "weight": 3 }, { "content": "A sign \"CHECKER\" is on the roof of the car.", "type": "attribute", "weight": 1 }, { "content": "The houses on the right side of the street are decorated with colorful Christmas lights.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera switches to the side of the taxi. The door opens and hot air gushes out from the car. A man in a gray coat and a yellow hat gets out of the car. Then he closes the door of the driver’s seat and opens the door of the rear seat. A man in a black suit, a white shirt and a red tie gets out of the car, and the heat also pours out. The man in the black suit waves his hand and takes a deep breath. Then they stand by the car and look at the distance. The camera shoots forward from the right.", "visual_elements": [ { "content": "Then, the camera switches to the side of the taxi.", "type": "camera", "weight": 3 }, { "content": "The door opens.", "type": "action", "weight": 2 }, { "content": "Hot air gushes out from the car.", "type": "attribute", "weight": 2 }, { "content": "A man in a gray coat and a yellow hat gets out of the car.", "type": "action", "weight": 3 }, { "content": "The man in the grey coat closes the door of the driver’s seat and opens the door of the rear seat.", "type": "action", "weight": 3 }, { "content": "A man in a black suit, a white shirt and a red tie gets out of the car.", "type": "action", "weight": 3 }, { "content": "The heat also pours out.", "type": "attribute", "weight": 2 }, { "content": "The man in the black suit waves his hand and takes a deep breath.", "type": "action", "weight": 3 }, { "content": "They stand by the car and look at the distance.", "type": "action", "weight": 3 }, { "content": "The camera shoots forward from the right.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0299", "video_path": "VELOCITI/v_K8IgSndDsjs_seg_155_165.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a blonde woman wearing a dark coat and a green shirt. The background is a dim environment. The camera switches its perspective and zooms out, showing a blonde woman standing in a shabby scene with garbage and rags scattered on the ground. Opposite the woman is a masked man standing next to a man who fell to the ground.\nThen, the camera switches to another screen from the low angle, showing the masked man wearing a black leather jacket and holding a metal stick and a book in his hand. Then the camera switches to the man who fell to the ground. He wears dark clothes, and his face and clothes are covered with mud.\nThe camera switches back to the masked man again. He slowly squats down, and the camera moves down with his movement. 
The camera shoots the man who fell to the ground. His expression is painful. The masked man puts his hands on the shoulders of the man who fell to the ground.", "events": [ { "event": "The video begins as the camera zooms in on a blonde woman wearing a dark coat and a green shirt. The background is a dim environment. The camera switches its perspective and zooms out, showing a blonde woman standing in a shabby scene with garbage and rags scattered on the ground. Opposite the woman is a masked man standing next to a man who fell to the ground.", "visual_elements": [ { "content": "The camera zooms in on a blonde woman.", "type": "camera", "weight": 3 }, { "content": "The blonde woman wears a dark coat and a green shirt.", "type": "attribute", "weight": 2 }, { "content": "The background is a dim environment.", "type": "scene", "weight": 1 }, { "content": "The camera switches its perspective and zooms out.", "type": "camera", "weight": 3 }, { "content": "The camera shows a blonde woman standing in a shabby scene.", "type": "scene", "weight": 2 }, { "content": "Garbage and rags are scattered on the ground.", "type": "scene", "weight": 1 }, { "content": "Opposite the woman is a masked man.", "type": "attribute", "weight": 3 }, { "content": "The man stands next to a man who fell to the ground.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera switches to another screen from the low angle, showing the masked man wearing a black leather jacket and holding a metal stick and a book in his hand. Then the camera switches to the man who fell to the ground. He wears dark clothes, and his face and clothes are covered with mud.", "visual_elements": [ { "content": "Then, the camera switches to another screen.", "type": "camera", "weight": 3 }, { "content": "from the low angle", "type": "camera", "weight": 3 }, { "content": "The screen shows the masked man wearing a black leather jacket.", "type": "attribute", "weight": 3 }, { "content": "The masked man holds a metal stick and a book in his hand.", "type": "attribute", "weight": 2 }, { "content": "Then the camera switches to the man who fell to the ground.", "type": "camera", "weight": 3 }, { "content": "The man wears dark clothes.", "type": "attribute", "weight": 2 }, { "content": "His face and clothes are covered with mud.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches back to the masked man again. He slowly squats down, and the camera moves down with his movement. The camera shoots the man who fell to the ground. His expression is painful. 
The masked man puts his hands on the shoulders of the man who fell to the ground.", "visual_elements": [ { "content": "The camera switches back to the masked man again.", "type": "camera", "weight": 3 }, { "content": "The masked man slowly squats down.", "type": "action", "weight": 2 }, { "content": "The camera moves down with his movement.", "type": "camera", "weight": 3 }, { "content": "The camera shoots the man who fell to the ground.", "type": "camera", "weight": 3 }, { "content": "The man who fell to the ground has a painful expression.", "type": "attribute", "weight": 2 }, { "content": "The masked man puts his hands on the shoulders of the man who fell to the ground.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0300", "video_path": "VELOCITI/v_KBfD-4BCMR0_seg_5_15.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera focuses on the upper bodies of two women. They stand outdoors with a stretch of green grassland in the background. The woman on the left wears a dark coat and blue sunglasses. Her hair is blonde. The woman on the right wears a striped vest and a necklace. She has brown hair.\nThen, the camera switches to a white door with a small window. The door opens from the inside and a man pokes his head out behind the door. He wears a white T-shirt and a blue baseball cap, smoking a cigarette in his mouth and holding the door with his hand. There is a pot of green plants by the door. The camera switches back to the outside again. This time the camera aims at another woman. She wears a pink coat, a pair of star-shaped earrings on her ears. She has blond hair. The background is a patch of blurred green plants.\nThe camera switches back to the man again. He walks out of the door completely. He leans forward slightly and looks at his right side to communicate with somebody. Several hats hanging on the wall inside the room can be seen in the screen. Then the camera switches back to the woman in the pink coat again. She is trying hard to explain. The camera finally switches back to the man who holds a can of drink in his right hand.", "events": [ { "event": "The video begins as the camera focuses on the upper bodies of two women. They stand outdoors with a stretch of green grassland in the background. The woman on the left wears a dark coat and blue sunglasses. Her hair is blonde. The woman on the right wears a striped vest and a necklace. 
She has brown hair.", "visual_elements": [ { "content": "The camera focuses on the upper bodies of two women.", "type": "camera", "weight": 3 }, { "content": "The two women stand in the open air.", "type": "attribute", "weight": 2 }, { "content": "A stretch of green grassland is in the background.", "type": "scene", "weight": 2 }, { "content": "The woman on the left wears a dark coat", "type": "attribute", "weight": 2 }, { "content": "The woman on the left wears blue sunglasses.", "type": "attribute", "weight": 3 }, { "content": "The hair of the woman on the left is blonde.", "type": "attribute", "weight": 3 }, { "content": "The woman on the right wears a striped vest.", "type": "attribute", "weight": 1 }, { "content": "The woman on the right wears a necklace.", "type": "attribute", "weight": 2 }, { "content": "The woman on the right has brown hair.", "type": "attribute", "weight": 3 } ] }, { "event": "Then, the camera switches to a white door with a small window. The door opens from the inside and a man pokes his head out behind the door. He wears a white T-shirt and a blue baseball cap, smoking a cigarette in his mouth and holding the door with his hand. There is a pot of green plants by the door. The camera switches back to the outside again. This time the camera aims at another woman. She wears a pink coat, a pair of star-shaped earrings on her ears. She has blond hair. The background is a patch of blurred green plants.", "visual_elements": [ { "content": "The camera switches to a white door.", "type": "camera", "weight": 3 }, { "content": "A small window is on the door.", "type": "attribute", "weight": 2 }, { "content": "The door opens from the inside.", "type": "attribute", "weight": 2 }, { "content": "A man pokes his head out behind the door.", "type": "action", "weight": 3 }, { "content": "The man wears a white T-shirt.", "type": "attribute", "weight": 2 }, { "content": "The man wears a blue baseball cap.", "type": "attribute", "weight": 2 }, { "content": "The man smokes a cigarette in his mouth.", "type": "attribute", "weight": 2 }, { "content": "The man holds the door with his hand.", "type": "action", "weight": 3 }, { "content": "There is a pot of green plants by the door.", "type": "scene", "weight": 2 }, { "content": "The camera switches back to the outside again.", "type": "camera", "weight": 3 }, { "content": "This time the camera aims at another woman.", "type": "camera", "weight": 3 }, { "content": "The woman wears a pink coat.", "type": "attribute", "weight": 2 }, { "content": "The woman wears a pair of star-shaped earrings on her ears.", "type": "attribute", "weight": 3 }, { "content": "The woman has blond hair.", "type": "attribute", "weight": 3 }, { "content": "The background is a patch of blurred green plants.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches back to the man again. He walks out of the door completely. He leans forward slightly and looks at his right side to communicate with somebody. Several hats hanging on the wall inside the room can be seen in the screen. Then the camera switches back to the woman in the pink coat again. She is trying hard to explain. 
The camera finally switches back to the man who holds a can of drink in his right hand.", "visual_elements": [ { "content": "The camera switches back to the man again.", "type": "camera", "weight": 3 }, { "content": "The man walks out of the door completely.", "type": "attribute", "weight": 2 }, { "content": "The man leans forward slightly and looks at his right side to communicate with somebody.", "type": "action", "weight": 3 }, { "content": "Several hats hanging on the wall inside the room can be seen in the screen.", "type": "scene", "weight": 1 }, { "content": "The camera switches back to the woman in the pink coat again.", "type": "camera", "weight": 3 }, { "content": "The woman is trying hard to explain.", "type": "action", "weight": 2 }, { "content": "The camera finally switches back to the man.", "type": "camera", "weight": 3 }, { "content": "The man holds a can of drink in his right hand.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 32 }, { "index": "TUNA_0301", "video_path": "VELOCITI/v_KrwlDh465HQ_seg_115_125.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a firefighter runs out with a child in his right arm. The firefighter wears a black fire suit and a helmet with a fire axe in his hand. The child wears a long-sleeved shirt with red and white stripes and blue trousers. Smoke and flames can be seen in the background. There are obvious cracks on the wall. The camera moves up to the left, continuously capturing the expressions and actions of the firefighter to rescue the child.\nThen, the camera switches to shoot another firefighter. His fire suit has obvious yellow reflective stripes, and the word \"CHICAGO\" is written on his helmet. His helmet and fire suit are covered with water drops.", "events": [ { "event": "At the beginning of the video, a firefighter runs out with a child in his right arm. The firefighter wears a black fire suit and a helmet with a fire axe in his hand. The child wears a long-sleeved shirt with red and white stripes and blue trousers. Smoke and flames can be seen in the background. There are obvious cracks on the wall. The camera moves up to the left, continuously capturing the expressions and actions of the firefighter to rescue the child.", "visual_elements": [ { "content": "A firefighter in the screen runs out with a child in his right arm.", "type": "camera", "weight": 3 }, { "content": "The firefighter wears a black fire suit.", "type": "attribute", "weight": 3 }, { "content": "The firefighter wears a helmet.", "type": "attribute", "weight": 3 }, { "content": "The firefighter holds a fire axe in his hand.", "type": "attribute", "weight": 3 }, { "content": "The child wears a long-sleeved shirt with red and white stripes and blue trousers.", "type": "attribute", "weight": 3 }, { "content": "Smoke and flames can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "There are obvious cracks on the wall.", "type": "scene", "weight": 1 }, { "content": "The camera moves up to the left.", "type": "camera", "weight": 3 }, { "content": "The camera continuously captures the expressions and actions of the firefighter to rescue the child.", "type": "camera", "weight": 3 } ] }, { "event": "Then, the camera switches to shoot another firefighter. 
His fire suit has obvious yellow reflective stripes, and the word \"CHICAGO\" is written on his helmet. His helmet and fire suit are covered with water drops.", "visual_elements": [ { "content": "The camera switches the perspective.", "type": "camera", "weight": 3 }, { "content": "The camera shoots another firefighter.", "type": "camera", "weight": 3 }, { "content": "The firefighter's fire suit has obvious yellow reflective stripes.", "type": "attribute", "weight": 2 }, { "content": "The word \"CHICAGO\" is written on his helmet.", "type": "attribute", "weight": 3 }, { "content": "The helmet and fire suit of the firefighter are covered with water drops.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0302", "video_path": "VELOCITI/v_LC1Sb6tRr4E_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a woman wearing a floral dress. She is lying on her stomach on a man who lies on the ground. There are colorful hairpins in her hair.\nThen the camera moves upwards. Two people are running towards her from behind. One of them is a man in a black vest and black and red striped short pants. Another is a woman in a grey suit. They bend to pull up the woman who is lying on her stomach from the ground. The camera moves to the right with their movements. Two women in nurse uniforms are standing by the side and watching them in the background. The nurse on the right of the screen is watching the scene with her mouth wide open. The background is a pure white room.\nThe camera switches again and shows a man in a white coat. He lies on the ground with his coat and red tie in a mess. He looks at the woman in the floral dress and shouts loudly. Then a woman in an orange dress walks towards him barefoot. She stretches her hands to support his arm and pulls him up. The camera moves with the doctor's movement.", "events": [ { "event": "The video begins as the camera zooms in on a woman wearing a floral dress. She is lying on her stomach on a man who lies on the ground. There are colorful hairpins in her hair.", "visual_elements": [ { "content": "The camera zooms in on a woman wearing a floral dress.", "type": "camera", "weight": 3 }, { "content": "She is lying on her stomach on a man who lies on the ground.", "type": "attribute", "weight": 2 }, { "content": "There are colorful hairpins in the woman's hair.", "type": "attribute", "weight": 3 } ] }, { "event": "Then the camera moves upwards. Two people are running towards her from behind. One of them is a man in a black vest and black and red striped short pants. Another is a woman in a grey suit. They bend to pull up the woman who is lying on her stomach from the ground. The camera moves to the right with their movements. Two women in nurse uniforms are standing by the side and watching them in the background. The nurse on the right of the screen is watching the scene with her mouth wide open. 
The background is a pure white room.", "visual_elements": [ { "content": "Then the camera moves upwards.", "type": "camera", "weight": 3 }, { "content": "Two people are running towards the woman lying on her stomach from behind.", "type": "attribute", "weight": 2 }, { "content": "One of them is a man in a black vest and black and red striped short pants.", "type": "attribute", "weight": 2 }, { "content": "Another is a woman in a grey suit.", "type": "attribute", "weight": 2 }, { "content": "They bend to pull up the woman who is lying on her stomach from the ground.", "type": "action", "weight": 3 }, { "content": "The camera moves to the right with their movements.", "type": "camera", "weight": 3 }, { "content": "Two women in nurse uniforms are standing by the side and watching them in the background.", "type": "scene", "weight": 2 }, { "content": "The nurse on the right of the screen is watching the scene with her mouth wide open.", "type": "attribute", "weight": 2 }, { "content": "The background is a pure white room.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches again and shows a man in a white coat. He lies on the ground with his coat and red tie in a mess. He looks at the woman in the floral dress and shouts loudly. Then a woman in an orange dress walks towards him barefoot. She stretches her hands to support his arm and pulls him up. The camera moves with the doctor's movement.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "The camera shows a man in a white coat.", "type": "attribute", "weight": 3 }, { "content": "The man lies on the ground.", "type": "attribute", "weight": 1 }, { "content": "The man's coat and red tie are in a mess.", "type": "attribute", "weight": 2 }, { "content": "The man looks at the woman in the floral dress and shouts loudly.", "type": "action", "weight": 3 }, { "content": "Then a woman in an orange dress walks towards him barefoot.", "type": "action", "weight": 2 }, { "content": "The woman in the orange dress stretches her hands to support the man's arm and pulls him up.", "type": "action", "weight": 3 }, { "content": "The camera moves with the doctor's movement.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0303", "video_path": "VELOCITI/v_LRPEYUlE8rU_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a kitchen scene. A young man in a grey jacket and a woolen hat stands in front of a fridge with the door open. Then he closes the door of the fridge and turns around to talk to someone. A phone is hanging on the wall of the kitchen in the background. Some cookware and seasonings are placed on the kitchen countertop.\nThe camera switches to a long-haired blonde woman. She wears a short-sleeved dark shirt. A chandelier in a floral shape is hanging over her head, emitting warm light. The camera moves downwards, and the woman sits by a round dining table beside a window blind. An old-fashioned radio and some sundries are on the table. Her left arm rests horizontally on the table, and her right arm props up her chin.\nThe camera switches back to the young man. This time the camera captures the upper body of the man. The camera shoots from the side of the man. The man stands in front of the kitchen counter. 
On his left is a kitchen cupboard. The camera switches to the woman again. She still sits by the table, with her eyes staring forward and her hand holding her chin. Then she looks down.", "events": [ { "event": "The video begins as the camera zooms in on a kitchen scene. A young man in a grey jacket and a woolen hat stands in front of a fridge with the door open. Then he closes the door of the fridge and turns around to talk to someone. A phone is hanging on the wall of the kitchen in the background. Some cookware and seasonings are placed on the kitchen countertop.", "visual_elements": [ { "content": "The camera zooms in on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "A young man in a grey jacket and a woolen hat stands in front of a fridge with the door open.", "type": "action", "weight": 3 }, { "content": "The man closes the door of the fridge and turns around to talk to someone.", "type": "action", "weight": 3 }, { "content": "A phone is hanging on the wall of the kitchen in the background.", "type": "scene", "weight": 2 }, { "content": "Some cookware and seasonings are placed on the kitchen countertop.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to a long-haired blonde woman. She wears a short-sleeved dark shirt. A chandelier in a floral shape is hanging over her head, emitting warm light. The camera moves downwards, and the woman sits by a round dining table beside a window blind. An old-fashioned radio and some sundries are on the table. Her left arm rests horizontally on the table, and her right arm props up her chin.", "visual_elements": [ { "content": "The camera switches to a long-haired blonde woman.", "type": "camera", "weight": 3 }, { "content": "The long-haired woman wears a short-sleeved dark shirt.", "type": "attribute", "weight": 3 }, { "content": "A chandelier in a floral shape is hanging over the long-haired woman's head, emitting warm light.", "type": "attribute", "weight": 2 }, { "content": "The camera moves downwards.", "type": "camera", "weight": 3 }, { "content": "The woman sits by a round dining table beside a window blind.", "type": "action", "weight": 3 }, { "content": "An old-fashioned radio and some sundries are on the table.", "type": "attribute", "weight": 2 }, { "content": "The long-haired woman puts her left arm on the table horizontally.", "type": "action", "weight": 3 }, { "content": "The long-haired woman's right arm holds her chin up.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches back to the young man. This time the camera captures the upper body of the man. The camera shoots from the side of the man. The man stands in front of the kitchen counter. On his left is a kitchen cupboard. The camera switches to the woman again. She still sits by the table, with her eyes staring forward and her hand holding her chin. 
Then she looks down.", "visual_elements": [ { "content": "The camera switches back to the young man.", "type": "camera", "weight": 3 }, { "content": "This time the camera captures the upper body of the man.", "type": "camera", "weight": 3 }, { "content": "The camera shoots from the side of the man.", "type": "camera", "weight": 3 }, { "content": "The man stands in front of the kitchen counter.", "type": "attribute", "weight": 2 }, { "content": "On the left of the man is a kitchen cupboard.", "type": "attribute", "weight": 1 }, { "content": "The camera switches to the woman again.", "type": "camera", "weight": 3 }, { "content": "The woman still sits by the table.", "type": "attribute", "weight": 2 }, { "content": "The woman holds her chin with her hand.", "type": "attribute", "weight": 2 }, { "content": "The woman's eyes stare forward.", "type": "attribute", "weight": 2 }, { "content": "The woman looks down.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0304", "video_path": "VELOCITI/v_LRPEYUlE8rU_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a long-haired blonde woman in a short-sleeved dark shirt. She looks forward, then the camera switches to another shot. A man in a grey coat and a hat stands by the kitchen counter with a transparent glass in his hand. Some cookware is placed on the countertop. The man looks at the woman with the glass in his hand and walks forward. The camera switches to the left with the man’s movement.\nThen the camera switches back to the woman. She still sits by the table. A key chain held in her hand swings slightly. She looks at the man with a smile on her face. The light is dim. Then, the camera switches to the close-up of the man. He looks at the woman and begins to talk. The camera returns to the close-up of the woman. She has the key chain in her right hand and tosses it to the man. Then the camera captures the man catching the chain.", "events": [ { "event": "The video begins as the camera zooms in on a long-haired blonde woman in a short-sleeved dark shirt. She looks forward, then the camera switches to another shot. A man in a grey coat and a hat stands by the kitchen counter with a transparent glass in his hand. Some cookware is placed on the countertop. The man looks at the woman with the glass in his hand and walks forward. 
The camera switches to the left with the man’s movement.", "visual_elements": [ { "content": "The camera zooms in on a long-haired blonde woman in a short-sleeved dark shirt.", "type": "camera", "weight": 3 }, { "content": "The long-haired woman looks forward.", "type": "action", "weight": 2 }, { "content": "Then the camera switches to another shot.", "type": "camera", "weight": 3 }, { "content": "There is a man in a grey coat and a hat.", "type": "attribute", "weight": 3 }, { "content": "The man stands by the kitchen counter.", "type": "attribute", "weight": 2 }, { "content": "The man holds a transparent glass in his hand.", "type": "attribute", "weight": 2 }, { "content": "Some cookware is placed on the countertop.", "type": "scene", "weight": 2 }, { "content": "The man looks at the woman with the glass in his hand and walks forward.", "type": "action", "weight": 3 }, { "content": "The camera switches to the left with the man’s movement.", "type": "camera", "weight": 3 } ] }, { "event": "Then the camera switches back to the woman. She still sits by the table. A key chain held in her hand swings slightly. She looks at the man with a smile on her face. The light is dim. Then, the camera switches to the close-up of the man. He looks at the woman and begins to talk. The camera returns to the close-up of the woman. She has the key chain in her right hand and tosses it to the man. Then the camera captures the man catching the chain.", "visual_elements": [ { "content": "Then the camera switches back to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman still sits by the table.", "type": "attribute", "weight": 2 }, { "content": "A key chain held in the woman's hand swings slightly.", "type": "action", "weight": 3 }, { "content": "The woman looks at the man with a smile on her face.", "type": "attribute", "weight": 2 }, { "content": "The light is dim.", "type": "scene", "weight": 1 }, { "content": "The camera switches to the close-up of the man.", "type": "camera", "weight": 3 }, { "content": "The man looks at the woman and begins to talk.", "type": "action", "weight": 3 }, { "content": "The camera returns to the close-up of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman has the key chain in her right hand and tosses it to the man.", "type": "action", "weight": 3 }, { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "The camera captures the man catching the chain.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 20 }, { "index": "TUNA_0305", "video_path": "VELOCITI/v_LRPEYUlE8rU_seg_45_55.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a young man sitting in the driver's seat of a car. He wears a dark jacket, a grey hoodie and a black woolen hat, holding the steering wheel in his hand. Some houses and trees can be seen in the background. The camera stays in the car, and the young man opens the door and gets off.\nAfter the young man gets off the car completely, the camera switches to the outside of the car and moves to the right. He goes to the front of the car to open the hood. At this time, A pair of black headphones can be seen hanging around his neck. 
A blue pickup truck and some old houses are in the background.", "events": [ { "event": "The video begins as the camera zooms in on a young man sitting in the driver's seat of a car. He wears a dark jacket, a grey hoodie and a black woolen hat, holding the steering wheel in his hand. Some houses and trees can be seen in the background. The camera stays in the car, and the young man opens the door and gets off.", "visual_elements": [ { "content": "The video begins as the camera zooms in on a young man sitting in the driver's seat of a car.", "type": "camera", "weight": 3 }, { "content": "The young man wears a dark jacket, a grey hoodie and a black woolen hat.", "type": "attribute", "weight": 3 }, { "content": "The young man holds the steering wheel in his hand.", "type": "attribute", "weight": 2 }, { "content": "Some houses and trees can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "The camera stays in the car.", "type": "camera", "weight": 3 }, { "content": "The young man opens the door and gets off.", "type": "action", "weight": 2 } ] }, { "event": "After the young man gets off the car completely, the camera switches to the outside of the car and moves to the right. He goes to the front of the car to open the hood. At this time, A pair of black headphones can be seen hanging around his neck. A blue pickup truck and some old houses are in the background.", "visual_elements": [ { "content": "After the young man gets off the car completely.", "type": "attribute", "weight": 1 }, { "content": "The camera switches to the outside of the car", "type": "camera", "weight": 3 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The young man goes to the front of the car to open the hood.", "type": "action", "weight": 3 }, { "content": "At this time, A pair of black headphones can be seen hanging around his neck.", "type": "attribute", "weight": 2 }, { "content": "A blue pickup truck and some old houses are in the background.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0306", "video_path": "VELOCITI/v_LTqaoTSCTc0_seg_80_90.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a young man who wears a dark sweater with two gray horizontal stripes. A work permit hangs on his collar. He walks out of the office door with a roll of paper in his hand. There is a middle-aged man in the office in a light blue shirt, sitting at his desk and looking down at the documents. Glass partition of the office and some office equipments can be seen in the background, including computers, printers, telephones and desk lamps.\nAfter that, the camera switches to another young man. He wears a long-sleeved light blue T-shirt with a number“1”and Stars and Stripes printed on it. On his collar hangs a work permit. He is walking in a corridor with food in his hand. The camera moves with the man in the blue T-shirt.\nThen, the camera switches inside a room. Several printers and some file boxes are in the room. A man in a dark sweater pulls the man in the blue T-shirt into the room and closes the door. Then the camera closes up and aims at two young men's faces. They are communicating.", "events": [ { "event": "The video begins as the camera zooms in on a young man who wears a dark sweater with two gray horizontal stripes. 
A work permit hangs on his collar. He walks out of the office door with a roll of paper in his hand. There is a middle-aged man in the office in a light blue shirt, sitting at his desk and looking down at the documents. Glass partition of the office and some office equipments can be seen in the background, including computers, printers, telephones and desk lamps.", "visual_elements": [ { "content": "The camera zooms in on a young man.", "type": "camera", "weight": 3 }, { "content": "The young man wears a dark sweater.", "type": "attribute", "weight": 2 }, { "content": "The sweater has two gray horizontal stripes.", "type": "attribute", "weight": 2 }, { "content": "A work permit hangs on his collar.", "type": "attribute", "weight": 2 }, { "content": "The young man walks out of the office door with a roll of paper in his hand.", "type": "action", "weight": 3 }, { "content": "There is a middle-aged man in the office.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged man is in a light blue shirt.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged man is sitting at his desk and looking down at the documents.", "type": "attribute", "weight": 3 }, { "content": "Glass partition of the office and some office equipments can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "The office equipments include computers, printers, telephones and desk lamps.", "type": "scene", "weight": 2 } ] }, { "event": "After that, the camera switches to another young man. He wears a long-sleeved light blue T-shirt with a number“1”and Stars and Stripes printed on it. On his collar hangs a work permit. He is walking in a corridor with food in his hand. The camera moves with the man in the blue T-shirt.", "visual_elements": [ { "content": "The camera switches to another young man.", "type": "camera", "weight": 3 }, { "content": "The young man wears a long-sleeved light blue T-shirt.", "type": "attribute", "weight": 2 }, { "content": "with a number“1”and Stars and Stripes printed on it.", "type": "attribute", "weight": 3 }, { "content": "On the young man's collar hangs a work permit.", "type": "attribute", "weight": 2 }, { "content": "The young man is walking in a corridor with food in his hand.", "type": "action", "weight": 3 }, { "content": "The camera moves with the man in the blue T-shirt.", "type": "camera", "weight": 3 } ] }, { "event": "Then, the camera switches inside a room. Several printers and some file boxes are in the room. A man in a dark sweater pulls the man in the blue T-shirt into the room and closes the door. Then the camera closes up and aims at two young men's faces. 
They are communicating.", "visual_elements": [ { "content": "The camera switches inside a room.", "type": "camera", "weight": 3 }, { "content": "Several printers and some file boxes are in the room.", "type": "scene", "weight": 2 }, { "content": "A man in a dark sweater pulls the man in the blue T-shirt into the room and closes the door.", "type": "action", "weight": 3 }, { "content": "The camera moves in for a close-up.", "type": "camera", "weight": 3 }, { "content": "The camera aims at the two young men's faces.", "type": "camera", "weight": 3 }, { "content": "The two men are communicating.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0307", "video_path": "VELOCITI/v_LepvQqFvoWc_seg_115_125.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a young man in a red coat and a light shirt. The young man wears a necklace and he is running quickly to the left of the screen. The camera moves with the man as he runs, keeping him in side view. Some blurry buildings can be seen in the background. Then the man in the red coat dances in a circle arm in arm with another man in a blue sleeveless shirt. He lets go of the man in the blue sleeveless shirt, and then he laughs and holds his arms high and steps back.\nThen, the camera switches to a man in a short-sleeved floral shirt and a black jacket. The camera switches with his movement. He runs towards the young man in the sleeveless blue shirt, and then they also dance together in a circle arm in arm. Then, he steps back. More people appear on the screen, including a woman in a light shirt and a man in a brown suit in the foreground.", "events": [ { "event": "The video begins as the camera zooms in on a young man in a red coat and a light shirt. The young man wears a necklace and he is running quickly to the left of the screen. The camera moves with the man as he runs, keeping him in side view. Some blurry buildings can be seen in the background. Then the man in the red coat dances in a circle arm in arm with another man in a blue sleeveless shirt. He lets go of the man in the blue sleeveless shirt, and then he laughs and holds his arms high and steps back.", "visual_elements": [ { "content": "The camera zooms in on a young man in a red coat and a light shirt.", "type": "camera", "weight": 3 }, { "content": "The young man wears a necklace.", "type": "attribute", "weight": 2 }, { "content": "The young man is running quickly to the left of the screen.", "type": "action", "weight": 3 }, { "content": "The camera moves with the man as he runs.", "type": "camera", "weight": 3 }, { "content": "The camera keeps him in side view.", "type": "camera", "weight": 3 }, { "content": "Some blurry buildings can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "Then the man in the red coat dances in a circle arm in arm with another man in a blue sleeveless shirt.", "type": "action", "weight": 3 }, { "content": "The man in the red coat lets go of the man in the blue sleeveless shirt.", "type": "action", "weight": 2 }, { "content": "The man in the red coat laughs and holds his arms high and steps back.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera switches to a man in a short-sleeved floral shirt and a black jacket. The camera switches with his movement. 
He runs towards the young man in the sleeveless blue shirt, and then they also dance together in a circle arm in arm. Then, he steps back. More people appear on the screen, including a woman in a light shirt and a man in a brown suit in the foreground.", "visual_elements": [ { "content": "The camera switches to a man in a short-sleeved floral shirt and a black jacket.", "type": "camera", "weight": 3 }, { "content": "The camera switches with his movement.", "type": "camera", "weight": 3 }, { "content": "The man runs towards the young man in the sleeveless blue shirt.", "type": "action", "weight": 3 }, { "content": "The two men also dance together in a circle arm in arm.", "type": "action", "weight": 3 }, { "content": "The man steps back and more people appear on the screen.", "type": "attribute", "weight": 2 }, { "content": "More people include a woman in a light shirt and a man in a brown suit in the foreground.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0308", "video_path": "VELOCITI/v_Lsm-snDPXnk_seg_40_50.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a middle-aged woman. She wears a white shirt and a pink cardigan. She is talking while holding a glass goblet filled with drinks in her left hand. She stands in an indoor environment. The wallpaper with patterns and a vase can be seen in the background.\nThen, the camera switches to another man in a dark shirt, who is talking with his head down. Behind him on his right sits another young man in a red shirt. A desk lamp and a brown vase with a pink bouquet can be seen in the background. The camera switches back to the middle-aged woman again. She is still standing in the same position, and her body moves slightly.\nThen, the camera switches back to the young man in the red shirt. He has long brown hair and a yellowish-brown beard. He leans back on a beige sofa and looks to his left. Behind him are closed shutters.\nThe camera switches to a young woman with blonde hair who wears a rose-red short-sleeved top and a necklace. After saying a few words, she holds a cake in her left hand and puts it in her mouth with a fork in her right hand.", "events": [ { "event": "The video begins as the camera zooms in on a middle-aged woman. She wears a white shirt and a pink cardigan. She is talking while holding a glass goblet filled with drinks in her left hand. She stands in an indoor environment. The wallpaper with patterns and a vase can be seen in the background.", "visual_elements": [ { "content": "The camera zooms in on a middle-aged woman.", "type": "camera", "weight": 3 }, { "content": "The middle-aged woman wears a white shirt and a pink cardigan.", "type": "attribute", "weight": 2 }, { "content": "She is talking while holding a glass goblet filled with drinks in her left hand.", "type": "action", "weight": 3 }, { "content": "The middle-aged woman stands in an indoor environment.", "type": "attribute", "weight": 2 }, { "content": "The wallpaper with patterns and a vase can be seen in the background.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to another man in a dark shirt, who is talking with his head down. Behind him on his right sits another young man in a red shirt. A desk lamp and a brown vase with a pink bouquet can be seen in the background. 
The camera switches back to the middle-aged woman again. She is still standing in the same position, and her body moves slightly.", "visual_elements": [ { "content": "The camera switches to another man in a dark shirt.", "type": "camera", "weight": 3 }, { "content": "The man in the dark shirt is talking with his head down.", "type": "action", "weight": 3 }, { "content": "Behind the man in the dark shirt, on his right, sits another young man in a red shirt.", "type": "attribute", "weight": 3 }, { "content": "A desk lamp and a brown vase with a pink bouquet can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "The camera switches back to the middle-aged woman again.", "type": "camera", "weight": 3 }, { "content": "The middle-aged woman is still standing in the same position.", "type": "attribute", "weight": 1 }, { "content": "The body of the middle-aged woman moves slightly.", "type": "action", "weight": 1 } ] }, { "event": "Then, the camera switches back to the young man in the red shirt. He has long brown hair and a yellowish-brown beard. He leans back on a beige sofa and looks to his left. Behind him are closed shutters.", "visual_elements": [ { "content": "The camera switches back to the young man in the red shirt.", "type": "camera", "weight": 3 }, { "content": "The man in the red shirt has long brown hair and a yellowish-brown beard.", "type": "attribute", "weight": 3 }, { "content": "The man in the red shirt leans back on a beige sofa and looks to his left.", "type": "attribute", "weight": 3 }, { "content": "Behind him are closed shutters.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches to a young woman with blonde hair who wears a rose-red short-sleeved top and a necklace. After saying a few words, she holds a cake in her left hand and puts it in her mouth with a fork in her right hand.", "visual_elements": [ { "content": "The camera switches to a young woman with blonde hair who wears a rose-red short-sleeved top and a necklace.", "type": "camera", "weight": 3 }, { "content": "After saying a few words, the blonde woman holds a cake in her left hand.", "type": "action", "weight": 2 }, { "content": "The blonde woman puts the cake in her mouth with a fork in her right hand.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0309", "video_path": "VELOCITI/v_Lsm-snDPXnk_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on an indoor scene. A lamp and a brown vase can be seen in the background. A man in black clothes sits on the sofa and talks sideways with his body leaning forward slightly. Beside him sits a young man in red clothes leaning on the sofa.\nThe camera switches to a young woman in pink clothes. Her hair is combed into a bun. Then, the camera switches to a middle-aged woman in a white shirt and a purple cardigan. She stands in the room talking.\nThen, the camera goes back to the two men on the sofa. The man in black holds a cigarette in his hand and the young man in red holds a plate with a cake in his hand. He is eating the cake with a fork. Behind him are shutters. The camera switches to a middle-aged woman in the purple cardigan again. Her expression is serious, and she points her left hand forward while talking.", "events": [ { "event": "The video begins as the camera zooms in on an indoor scene. 
A lamp and a brown vase can be seen in the background. A man in black clothes sits on the sofa and talks sideways with his body leaning forward slightly. Beside him sits a young man in red clothes leaning on the sofa.", "visual_elements": [ { "content": "The camera zooms in on an indoor scene.", "type": "camera", "weight": 3 }, { "content": "A lamp and a brown vase can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "A man in black clothes sits on the sofa.", "type": "attribute", "weight": 2 }, { "content": "The man in black talks sideways with his body leaning forward slightly.", "type": "action", "weight": 3 }, { "content": "Beside him sits a young man in red clothes.", "type": "attribute", "weight": 2 }, { "content": "The man in red clothes leans on the sofa.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to a young woman in pink clothes. Her hair is combed into a bun. Then, the camera switches to a middle-aged woman in a white shirt and a purple cardigan. She stands in the room talking.", "visual_elements": [ { "content": "The camera switches to a young woman in pink clothes.", "type": "camera", "weight": 3 }, { "content": "The woman's hair is combed into a bun.", "type": "attribute", "weight": 2 }, { "content": "The camera switches to a middle-aged woman in a white shirt and a purple cardigan.", "type": "camera", "weight": 3 }, { "content": "The middle-aged woman stands in the room talking.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera goes back to the two men on the sofa. The man in black holds a cigarette in his hand and the young man in red holds a plate with a cake in his hand. He is eating the cake with a fork. Behind him are shutters. The camera switches to a middle-aged woman in the purple cardigan again. Her expression is serious, and she points her left hand forward while talking.", "visual_elements": [ { "content": "The camera goes back to the two men on the sofa.", "type": "camera", "weight": 3 }, { "content": "The man in black holds a cigarette in his hand.", "type": "attribute", "weight": 3 }, { "content": "The young man in red holds a plate with a cake in his hand.", "type": "attribute", "weight": 3 }, { "content": "The young man in red is eating the cake with a fork.", "type": "action", "weight": 3 }, { "content": "Behind him are shutters.", "type": "scene", "weight": 2 }, { "content": "The camera switches to a middle-aged woman in the purple cardigan again.", "type": "camera", "weight": 3 }, { "content": "The expression of the middle-aged woman is serious.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged woman points her left hand forward while talking.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0310", "video_path": "VELOCITI/v_MPqg9uMHTDQ_seg_5_15.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a man and a woman sitting on the sofa. The man wears a plaid shirt and a dark inner layer, with a cup in his hand. The woman wears a green shirt, and her left hand, which wears a ring, rests on her crossed legs. The man points forward with his finger while talking, and then he lifts a teacup. A woven basket and some plants can be seen in the background. 
The curtain is translucent.\nThe camera switches to the kitchen area, and a man wearing a light-colored sweater and a shirt and black-rimmed glasses walks to the living room with a tray. In the background, another woman in a light-colored shirt and a striped cardigan can be seen busy in the kitchen. Some drinks are on the table. There are also several pots of green plants and a desk lamp in the background.\nThe camera follows the man to the left. He walks to the sofa with a cake on the tray in his hand. The window of the living room is large, and the curtains are open. Some green plants can be seen outside. The man walks to the sofa and puts the cake on the coffee table. The man and the woman on the sofa still sit there. The man in the plaid shirt still holds the teacup, while the woman next to him looks at the man who puts the cake on the table.", "events": [ { "event": "The video begins as the camera zooms in on a man and a woman sitting on the sofa. The man wears a plaid shirt and a dark inner layer, with a cup in his hand. The woman wears a green shirt, and her left hand, which wears a ring, rests on her crossed legs. The man points forward with his finger while talking, and then he lifts a teacup. A woven basket and some plants can be seen in the background. The curtain is translucent.", "visual_elements": [ { "content": "The video begins as the camera zooms in on a man and a woman sitting on the sofa.", "type": "camera", "weight": 3 }, { "content": "The man wears a plaid shirt and a dark inner layer.", "type": "attribute", "weight": 2 }, { "content": "The man has a cup in his hand.", "type": "attribute", "weight": 3 }, { "content": "The woman wears a green shirt.", "type": "attribute", "weight": 3 }, { "content": "The woman's left hand, which wears a ring, rests on her crossed legs.", "type": "attribute", "weight": 2 }, { "content": "The man points forward with his finger while talking, and then he lifts a teacup.", "type": "action", "weight": 3 }, { "content": "A woven basket and some plants can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "The curtain is translucent.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera switches to the kitchen area, and a man wearing a light-colored sweater and a shirt and black-rimmed glasses walks to the living room with a tray. In the background, another woman in a light-colored shirt and a striped cardigan can be seen busy in the kitchen. Some drinks are on the table. There are also several pots of green plants and a desk lamp in the background.", "visual_elements": [ { "content": "The camera switches to the kitchen area.", "type": "camera", "weight": 3 }, { "content": "A man wearing a light-colored sweater and a shirt and black-rimmed glasses walks to the living room with a tray.", "type": "action", "weight": 3 }, { "content": "In the background, another woman in a light-colored shirt and a striped cardigan can be seen busy in the kitchen.", "type": "scene", "weight": 2 }, { "content": "Some drinks are on the table.", "type": "attribute", "weight": 1 }, { "content": "There are also several pots of green plants and a desk lamp in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera follows the man to the left. He walks to the sofa with a cake on the tray in his hand. The window of the living room is large, and the curtains are open. Some green plants can be seen outside. The man walks to the sofa and puts the cake on the coffee table. The man and the woman on the sofa still sit there. 
The man in the plaid shirt still holds the teacup, while the woman next to him looks at the man who puts the cake on the table.", "visual_elements": [ { "content": "The camera follows the man to the left.", "type": "camera", "weight": 3 }, { "content": "The man in the light-colored shirt walks to the sofa.", "type": "action", "weight": 3 }, { "content": "The tray in the hand of the man who wears a light-colored shirt has a cake on it.", "type": "action", "weight": 3 }, { "content": "The window of the living room is large.", "type": "scene", "weight": 2 }, { "content": "The curtains are open.", "type": "scene", "weight": 2 }, { "content": "Some green plants can be seen outside.", "type": "scene", "weight": 2 }, { "content": "The man walks to the sofa and puts the cake on the coffee table.", "type": "action", "weight": 3 }, { "content": "The man and the woman on the sofa still sit there.", "type": "attribute", "weight": 2 }, { "content": "The man in the plaid shirt still holds the teacup.", "type": "attribute", "weight": 2 }, { "content": "The woman next to him looks at the man who puts the cake on the table.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0311", "video_path": "VELOCITI/v_MUEhAUpa7iA_seg_90_100.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a man and a woman appear on the screen. The man wears black clothes and he is lying on his stomach on the beach. The woman is lying on the beach in a white dress. There are several thick ropes scattered around them, and some ropes are wrapped around their wrists.\nThe camera switches to a medium shot, and the man on the right side of the screen holds up his body vigorously. Then he kneels on one knee and holds the woman's wrist tightly, and they stand up together. Beaches, rocks and sea water can be seen in the background. The camera switches to a closer perspective again, shooting from the left behind the man. The screen shows the whole picture of the woman, who wears a white necklace and whose hair is wet over her shoulders.", "events": [ { "event": "At the beginning of the video, a man and a woman appear on the screen. The man wears black clothes and he is lying on his stomach on the beach. The woman is lying on the beach in a white dress. There are several thick ropes scattered around them, and some ropes are wrapped around their wrists.", "visual_elements": [ { "content": "A man and a woman appear on the screen.", "type": "attribute", "weight": 2 }, { "content": "The man wears black clothes and he is lying on his stomach on the beach.", "type": "attribute", "weight": 3 }, { "content": "The woman is lying on the beach in a white dress.", "type": "attribute", "weight": 3 }, { "content": "There are several thick ropes scattered around them.", "type": "attribute", "weight": 2 }, { "content": "Some ropes are wrapped around their wrists.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to a medium shot, and the man on the right side of the screen holds up his body vigorously. Then he kneels on one knee and holds the woman's wrist tightly, and they stand up together. Beaches, rocks and sea water can be seen in the background. 
The camera switches to a closer perspective again, shooting from the left behind the man. The screen shows the whole picture of the woman, who wears a white necklace and whose hair is wet over her shoulders.", "visual_elements": [ { "content": "The camera switches to a medium shot.", "type": "camera", "weight": 3 }, { "content": "The man on the right side of the screen holds up his body vigorously.", "type": "action", "weight": 3 }, { "content": "The man kneels on one knee and holds the woman's wrist tightly.", "type": "action", "weight": 3 }, { "content": "The man and the woman stand up together.", "type": "action", "weight": 3 }, { "content": "Beaches, rocks and sea water can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "The camera switches to a closer perspective again.", "type": "camera", "weight": 3 }, { "content": "The camera shoots from the left behind the man.", "type": "camera", "weight": 3 }, { "content": "The screen shows the whole picture of the woman.", "type": "attribute", "weight": 2 }, { "content": "The woman wears a white necklace.", "type": "attribute", "weight": 3 }, { "content": "The woman's hair is wet over her shoulders.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0312", "video_path": "VELOCITI/v_MVY7ci-BTI4_seg_155_165.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a man. He throws his head back. The background is dim. Then, the camera switches to a platform on which the man falls down from above. He is in dark clothes. There are metal balustrades around the platform and the light reflects on the screen.\nThe camera then switches to another man in a dark jacket and jeans. He holds a weapon and walks along the platform to the fallen man. The camera follows his movements from the front. Then the camera switches perspective again and shoots from behind the man in the dark jacket. The man in the dark jacket bends down to check the fallen man.\nThe camera switches to another perspective. The man in the dark jacket stands up, and the camera shows a woman in a light-colored shirt and a black coat approaching them from far away. There is a white patterned handbag in the woman's hand.", "events": [ { "event": "The video begins as the camera zooms in on a man. He throws his head back. The background is dim. Then, the camera switches to a platform on which the man falls down from above. He is in dark clothes. There are metal balustrades around the platform and the light reflects on the screen.", "visual_elements": [ { "content": "The camera zooms in on a man.", "type": "camera", "weight": 3 }, { "content": "The man throws his head back.", "type": "action", "weight": 2 }, { "content": "The background is dim.", "type": "scene", "weight": 1 }, { "content": "The camera switches to a platform.", "type": "camera", "weight": 3 }, { "content": "The man falls down from above on the platform.", "type": "action", "weight": 3 }, { "content": "The man is in dark clothes.", "type": "attribute", "weight": 2 }, { "content": "There are metal balustrades around the platform and the light reflects on the screen.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then switches to another man in a dark jacket and jeans. He holds a weapon and walks along the platform to the fallen man. 
The camera follows his movements from the front. Then the camera switches perspective again and shoots from behind the man in the dark jacket. The man in the dark jacket bends down to check the fallen man.", "visual_elements": [ { "content": "The camera then switches to another man in a dark jacket and jeans.", "type": "camera", "weight": 3 }, { "content": "The man in the dark jacket holds a weapon and walks along the platform to the fallen man.", "type": "action", "weight": 3 }, { "content": "The camera follows the movements of the man in the dark jacket from the front.", "type": "camera", "weight": 3 }, { "content": "The camera switches perspective again.", "type": "camera", "weight": 3 }, { "content": "The camera shoots from behind the man in the dark jacket.", "type": "camera", "weight": 3 }, { "content": "The man in the dark jacket bends down to check the fallen man.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to another perspective. The man in the dark jacket stands up, and the camera shows a woman in a light-colored shirt and a black coat approaching them from far away. There is a white patterned handbag in the woman's hand.", "visual_elements": [ { "content": "The camera switches to another perspective.", "type": "camera", "weight": 3 }, { "content": "The man in the dark jacket stands up.", "type": "action", "weight": 2 }, { "content": "The camera shows a woman in a light-colored shirt and a black coat approaching them from far away, with a white patterned handbag in her hand.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0313", "video_path": "VELOCITI/v_Mkkd7taPqEY_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on a dark indoor scene. A middle-aged woman in a patterned blouse and blue trousers appears in the center of the screen. She carries a white patterned handbag in her hand. She walks to a man sitting in a chair on the right side of the screen. He wears dark clothes. He puts his hands on his knees, and his eyes are fixed straight ahead. At the same time, there appears a doctor in a white coat standing on the stairs.\nThe middle-aged woman walks up to the sitting man. Then, the sitting man stands up and walks towards the stairs. The doctor and the man go upstairs one after the other. The middle-aged woman stands in front of the chair that the man used to sit on and watches the man going upstairs. Then the middle-aged woman takes the man's hand and then lets it go. At last, the middle-aged woman intends to sit on the chair.", "events": [ { "event": "The video begins as the camera zooms in on a dark indoor scene. A middle-aged woman in a patterned blouse and blue trousers appears in the center of the screen. She carries a white patterned handbag in her hand. She walks to a man sitting in a chair on the right side of the screen. He wears dark clothes. He puts his hands on his knees, and his eyes are fixed straight ahead. 
At the same time, there appears a doctor in a white coat standing on the stairs.", "visual_elements": [ { "content": "The camera zooms in on a dark indoor scene.", "type": "camera", "weight": 3 }, { "content": "A middle-aged woman in a patterned blouse and blue trousers appears in the center of the screen.", "type": "attribute", "weight": 3 }, { "content": "The middle-aged woman carries a white patterned handbag in her hand.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged woman walks to a man sitting in a chair on the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The man wears dark clothes.", "type": "attribute", "weight": 2 }, { "content": "The man puts his hands on his knees.", "type": "attribute", "weight": 2 }, { "content": "The man's eyes are fixed straight ahead.", "type": "attribute", "weight": 1 }, { "content": "On the screen appears a doctor in a white coat standing on the stairs.", "type": "attribute", "weight": 2 } ] }, { "event": "The middle-aged woman walks up to the sitting man. Then, the sitting man stands up and walks towards the stairs. The doctor and the man go upstairs one after the other. The middle-aged woman stands in front of the chair that the man used to sit on and watches the man going upstairs. Then the middle-aged woman takes the man's hand and then lets it go. At last, the middle-aged woman intends to sit on the chair.", "visual_elements": [ { "content": "The middle-aged woman walks up to the sitting man.", "type": "action", "weight": 2 }, { "content": "The sitting man stands up and walks towards the stairs.", "type": "action", "weight": 3 }, { "content": "The doctor and the man go upstairs one after the other.", "type": "action", "weight": 3 }, { "content": "The middle-aged woman stands in front of the chair that the man used to sit on and watches the man going upstairs.", "type": "action", "weight": 2 }, { "content": "The middle-aged woman takes the man's hand and then lets it go.", "type": "action", "weight": 2 }, { "content": "The middle-aged woman intends to sit on the chair.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0314", "video_path": "VELOCITI/v_MpGCnuiCSuU_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins as the camera zooms in on the inside of a tent. The light inside the tent is dim. A young woman in a white vest and shorts and a young man in a grey T-shirt sit in the tent. The man is treating the wound on the woman's leg.\nNext, the camera switches to a bloody scene. A pair of legs covered with blood appear on the screen. The camera moves upwards quickly. A man in a blue shirt is lying on the ground with bloodstains on the right side of his head. Then, the camera switches to a young woman who wears a light-colored shirt and black-framed glasses. She kneels on the ground to help the lying man in a panic. Then she leaves after calling a man in a pale blue shirt. The camera switches again to the legs covered with blood. Then the camera moves upwards and shows the man in the pale blue shirt kneeling beside the wounded man. He is too bewildered to know what to do. Green forests and blue sky can be seen in the background.", "events": [ { "event": "The video begins as the camera zooms in on the inside of a tent. The light inside the tent is dim. 
A young woman in a white vest and shorts and a young man in a grey T-shirt sit in the tent. The man is treating the wound on the woman's leg.", "visual_elements": [ { "content": "The camera zooms in on the inside of a tent.", "type": "camera", "weight": 3 }, { "content": "The light in the tent is dim.", "type": "scene", "weight": 2 }, { "content": "A young woman in a white vest and shorts and a young man in a grey T-shirt sit in the tent.", "type": "attribute", "weight": 3 }, { "content": "The man is treating the wound on the woman's leg.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to a bloody scene. A pair of legs covered with blood appear on the screen. The camera moves upwards quickly. A man in a blue shirt is lying on the ground with bloodstains on the right side of his head. Then, the camera switches to a young woman who wears a light-colored shirt and black-framed glasses. She kneels on the ground to help the lying man in a panic. Then she leaves after calling a man in a pale blue shirt. The camera switches again to the legs covered with blood. Then the camera moves upwards and shows the man in the pale blue shirt kneeling beside the wounded man. He is too bewildered to know what to do. Green forests and blue sky can be seen in the background.", "visual_elements": [ { "content": "Suddenly, the camera switches to a bloody scene.", "type": "camera", "weight": 3 }, { "content": "A pair of legs covered with blood appear on the screen.", "type": "attribute", "weight": 3 }, { "content": "The camera moves upwards quickly.", "type": "camera", "weight": 3 }, { "content": "A man in a blue shirt is lying on the ground.", "type": "attribute", "weight": 2 }, { "content": "Bloodstains are on the right side of his head.", "type": "attribute", "weight": 2 }, { "content": "The camera switches to a young woman who wears a light-colored shirt and black-framed glasses.", "type": "camera", "weight": 3 }, { "content": "The young woman kneels on the ground to help the lying man in a panic.", "type": "action", "weight": 3 }, { "content": "The woman leaves after calling a man in a pale blue shirt.", "type": "action", "weight": 2 }, { "content": "The camera switches again to the legs covered with blood.", "type": "camera", "weight": 3 }, { "content": "The camera moves upwards.", "type": "camera", "weight": 3 }, { "content": "The man in the pale blue shirt kneels beside the wounded man.", "type": "action", "weight": 2 }, { "content": "He is too bewildered to know what to do.", "type": "attribute", "weight": 2 }, { "content": "Green forests and blue sky can be seen in the background.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0315", "video_path": "VELOCITI/v_N2EC0gAi2lk_seg_115_125.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The scene opens with the camera zooming in on a man. He’s blonde, dressed in chainmail and a black cloak, with blood dripping from a wound on his left shoulder. He’s leaning against a gray brick wall. His right hand is tightly held by another person, who’s trying to help him stand. The camera then pans slightly to the right, revealing the woman holding his hand. 
She has black hair and is wearing clothes that look like animal hide, exposing her arms and midriff.\nThe camera continues to pan right, showing the woman pulling the man up with effort, helping him stand steady. The man looks extremely weak, leaning forward, while the woman supports him with all her strength. The background reveals a narrow alley, flanked by tall brick walls, with weeds and scattered gravel on the ground.\nThe camera cuts to the other side, revealing a man in black armor, leaning against the wall as if observing the situation ahead. The camera then cuts back to the blonde man and the black-haired woman. The woman drapes the man's arm over her shoulder and slowly begins to move forward.\nThe camera follows the blonde man and the black-haired woman as they move slowly through the alley. The woman supports the man with one hand, while the other grips a long sword, her eyes scanning the surroundings warily. The camera then cuts to a front view, showing the blonde man and the black-haired woman facing the man in black armor. The distance between the three of them steadily closes.\nThe camera zooms in on the black-haired woman’s face. Her expression is serious, her gaze unwavering. She raises the sword in her hand, and the camera cuts to the man in black armor. The woman thrusts the sword through his face. The camera then cuts back to a front view of the woman, and the video stops.", "events": [ { "event": "The scene opens with the camera zooming in on a man. He’s blonde, dressed in chainmail and a black cloak, with blood dripping from a wound on his left shoulder. He’s leaning against a gray brick wall. His right hand is tightly held by another person, who’s trying to help him stand. The camera then pans slightly to the right, revealing the woman holding his hand. She has black hair and is wearing clothes that look like animal hide, exposing her arms and midriff.", "visual_elements": [ { "content": "The video begins with the camera focused on a blonde-haired man.", "type": "camera", "weight": 3 }, { "content": "The blonde-haired man is wearing chainmail and a black cloak, with blood streaming from a wound on his left shoulder.", "type": "attribute", "weight": 3 }, { "content": "The blonde-haired man is leaning against a gray brick wall.", "type": "action", "weight": 3 }, { "content": "The blonde-haired man's right hand is tightly grasped by another person's hand.", "type": "attribute", "weight": 3 }, { "content": "The blonde-haired man attempts to stand up.", "type": "action", "weight": 3 }, { "content": "The camera slightly pans to the right.", "type": "camera", "weight": 3 }, { "content": "The blonde-haired man's hand is tightly grasped by a woman with black hair.", "type": "attribute", "weight": 3 }, { "content": "The woman with black hair is wearing clothing resembling animal pelts, exposing her arms and midriff.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera continues to pan right, showing the woman pulling the man up with effort, helping him stand steady. The man looks extremely weak, leaning forward, while the woman supports him with all her strength. 
The background reveals a narrow alley, flanked by tall brick walls, with weeds and scattered gravel on the ground.", "visual_elements": [ { "content": "The camera continues to pan to the right.", "type": "camera", "weight": 3 }, { "content": "The woman pulls the man up with force, helping him steady himself.", "type": "action", "weight": 3 }, { "content": "The man appears very weak, his body leaning forward.", "type": "attribute", "weight": 3 }, { "content": "The woman supports him firmly, holding him up with effort.", "type": "action", "weight": 3 }, { "content": "In the background, the scene shows a narrow alley with tall brick walls on either side. The ground is scattered with weeds and gravel.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts to the other side, revealing a man in black armor, leaning against the wall as if observing the situation ahead. The camera then cuts back to the blonde man and the black-haired woman. The woman drapes the man's arm over her shoulder and slowly begins to move forward.", "visual_elements": [ { "content": "The man and the woman walk off to the left side of the frame, leaving the scene.", "type": "action", "weight": 3 }, { "content": "A middle-aged man with a pale complexion appears in the scene.", "type": "attribute", "weight": 3 }, { "content": "The man is wearing black armor.", "type": "attribute", "weight": 3 }, { "content": "The man in armor is leaning against the wall.", "type": "attribute", "weight": 3 }, { "content": "The camera cuts back to the blonde-haired man and the black-haired woman.", "type": "camera", "weight": 3 }, { "content": "The woman drapes the man's arm over her shoulder and slowly moves forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera follows the blonde man and the black-haired woman as they move slowly through the alley. The woman supports the man with one hand, while the other grips a long sword, her eyes scanning the surroundings warily. The camera then cuts to a front view, showing the blonde man and the black-haired woman facing the man in black armor. The distance between the three of them steadily closes.", "visual_elements": [ { "content": "The camera follows the movement of the blonde-haired man and the black-haired woman.", "type": "camera", "weight": 3 }, { "content": "The blonde-haired man and the black-haired woman move slowly through the alley.", "type": "action", "weight": 3 }, { "content": "The woman supports the man with one hand, while holding a longsword in the other, cautiously scanning the surroundings.", "type": "action", "weight": 3 }, { "content": "The camera slowly pans to the left.", "type": "camera", "weight": 3 }, { "content": "The blonde-haired man and the black-haired woman face the man in black armor.", "type": "attribute", "weight": 3 }, { "content": "The distance between the three of them gradually decreases.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera zooms in on the black-haired woman’s face. Her expression is serious, her gaze unwavering. She raises the sword in her hand, and the camera cuts to the man in black armor. The woman thrusts the sword through his face. 
The camera then cuts back to a front view of the woman, and the video stops.", "visual_elements": [ { "content": "The camera zooms in on the black-haired woman's face.", "type": "camera", "weight": 3 }, { "content": "The woman’s expression is serious, her gaze determined.", "type": "attribute", "weight": 3 }, { "content": "The woman raises the sword in her hand.", "type": "action", "weight": 3 }, { "content": "The camera cuts to the man in black armor.", "type": "camera", "weight": 3 }, { "content": "The woman thrusts the sword into the face of the man in black armor.", "type": "action", "weight": 3 }, { "content": "The camera cuts to a front view of the woman, and the video stops.", "type": "camera", "weight": 3 } ] } ], "n_events": 5, "n_elements": 31 }, { "index": "TUNA_0316", "video_path": "VELOCITI/v_NEc_n0W4ans_seg_115_125.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The scene begins with the camera focused on a young man wearing a striped t-shirt. In the foreground of the scene, there are hands holding a green hose. The young man's hair is wet. The background is a gray wall. The man holding the hose is wearing a green t-shirt. A man walks from the left side of the scene to the right side, holding a water hose.\nThe video begins with the camera focused on a young man wearing a striped T-shirt. The shot is slightly blurred on a pair of hands holding a green hose. The young man’s hair is wet, and the background is a gray wall. The camera then gradually shifts focus to a man in a green T-shirt, who is holding the green hose and spraying water forward.\nThe camera cuts to a concert scene, where a rock band is performing on stage. The lead singer, shirtless and wearing black leather pants, stands at the center of the stage, making dramatic gestures. On either side of him, the guitarists are playing energetically. The background lights flicker, and the crowd below watches in excitement, the atmosphere charged with energy. The lights above the stage illuminate the entire venue.", "events": [ { "event": "The scene begins with the camera focused on a young man wearing a striped t-shirt. In the foreground of the scene, there are hands holding a green hose. The young man's hair is wet. The background is a gray wall. The man holding the hose is wearing a green t-shirt. A man walks from the left side of the scene to the right side, holding a water hose.", "visual_elements": [ { "content": "The scene begins with the camera focused on a young man wearing a striped t-shirt.", "type": "camera", "weight": 3 }, { "content": "In the foreground of the scene, there are hands holding a green hose.", "type": "attribute", "weight": 2 }, { "content": "The young man's hair is wet.", "type": "attribute", "weight": 3 }, { "content": "The background is a gray wall.", "type": "scene", "weight": 1 }, { "content": "The man holding the hose is wearing a green t-shirt.", "type": "attribute", "weight": 3 }, { "content": "A man walks from the left side of the scene to the right side, holding a water hose.", "type": "action", "weight": 3 } ] }, { "event": "The video begins with the camera focused on a young man wearing a striped T-shirt. The shot is slightly blurred on a pair of hands holding a green hose. The young man’s hair is wet, and the background is a gray wall. 
The camera then gradually shifts focus to a man in a green T-shirt, who is holding the green hose and spraying water forward.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The man in the green T-shirt puts down the water hose.", "type": "action", "weight": 3 }, { "content": "A woman wearing black shorts and a white tank top is in front of the man.", "type": "attribute", "weight": 3 }, { "content": "The man bends down and splashes water at the woman with his hand.", "type": "action", "weight": 3 }, { "content": "The woman tries to dodge.", "type": "action", "weight": 2 }, { "content": "In the background, there's an old truck and clothes hanging on a clothesline.", "type": "scene", "weight": 2 }, { "content": "Then, the man walks away to his left.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a concert scene, where a rock band is performing on stage. The lead singer, shirtless and wearing black leather pants, stands at the center of the stage, making dramatic gestures. On either side of him, the guitarists are playing energetically. The background lights flicker, and the crowd below watches in excitement, the atmosphere charged with energy. The lights above the stage illuminate the entire venue.", "visual_elements": [ { "content": "The camera cuts to a concert scene.", "type": "camera", "weight": 3 }, { "content": "On stage, there's a rock band.", "type": "attribute", "weight": 3 }, { "content": "The lead singer is shirtless, wearing black leather pants.", "type": "attribute", "weight": 3 }, { "content": "The lead singer stands at the center of the stage, making exaggerated gestures.", "type": "action", "weight": 3 }, { "content": "The guitarists on both sides are also playing.", "type": "action", "weight": 2 }, { "content": "The background lights are flashing.", "type": "scene", "weight": 2 }, { "content": "The audience is watching from the crowd.", "type": "scene", "weight": 2 }, { "content": "The lights above the stage illuminate the entire venue.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0317", "video_path": "VELOCITI/v_NTGOA6lPP5w_seg_75_85.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on the floor, where a patterned carpet lies. Two pairs of feet move across it—one pair in brown pointed shoes, the other in black leather shoes with rolled-up pant cuffs. The lighting in the background is dim.\nThe camera then cuts to a woman sitting in a chair. She’s wearing a light blue sweater and a pearl necklace, holding a book in her hands. As she reads, she smokes, with the smoke drifting above her head. In the background, a bookshelf is visible, filled with books and a decorative bird sculpture. The woman stops smoking, looks up, then lowers her head to extinguish the cigarette. She sets the book down, preparing to stand up.\nThe camera then cuts to a scene at a doorway. The woman walks toward the door, opens it, and stands to the side. Light pours in from outside, revealing a white wall with a black door lock. As she opens the door, a man in a black jacket, carrying a guitar, enters from the right side of the frame. Behind him, another man in a red and gray shirt follows. The woman steps aside to let the two men pass. 
The video freezes as the man with the guitar reaches the door.", "events": [ { "event": "The video begins with the camera focused on the floor, where a patterned carpet lies. Two pairs of feet move across it—one pair in brown pointed shoes, the other in black leather shoes with rolled-up pant cuffs. The lighting in the background is dim.", "visual_elements": [ { "content": "At the start of the video, the camera is focused on the ground.", "type": "camera", "weight": 3 }, { "content": "A patterned carpet is spread out on the ground.", "type": "scene", "weight": 2 }, { "content": "Two pairs of feet move on the carpet.", "type": "action", "weight": 3 }, { "content": "One pair of feet is wearing brown pointed shoes, and the other pair is in black leather shoes, with the pant legs rolled up.", "type": "attribute", "weight": 3 }, { "content": "The lighting in the background is dim.", "type": "scene", "weight": 1 } ] }, { "event": "The camera then cuts to a woman sitting in a chair. She’s wearing a light blue sweater and a pearl necklace, holding a book in her hands. As she reads, she smokes, with the smoke drifting above her head. In the background, a bookshelf is visible, filled with books and a decorative bird sculpture. The woman stops smoking, looks up, then lowers her head to extinguish the cigarette. She sets the book down, preparing to stand up.", "visual_elements": [ { "content": "The camera then cuts to a woman sitting in a chair.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a light blue sweater, a pearl necklace around her neck, and holding a book in her hands.", "type": "attribute", "weight": 3 }, { "content": "A woman is reading a book while smoking.", "type": "action", "weight": 3 }, { "content": "Smoke spreads above the woman's head.", "type": "scene", "weight": 2 }, { "content": "In the background, a bookshelf is visible, with some books and a decorative bird sculpture on it.", "type": "scene", "weight": 2 }, { "content": "The woman stops smoking and looks up.", "type": "action", "weight": 3 }, { "content": "The woman lowers her head, puts out the cigarette, then puts down the book, preparing to stand up.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to a scene at a doorway. The woman walks toward the door, opens it, and stands to the side. Light pours in from outside, revealing a white wall with a black door lock. As she opens the door, a man in a black jacket, carrying a guitar, enters from the right side of the frame. Behind him, another man in a red and gray shirt follows. The woman steps aside to let the two men pass. 
The video freezes as the man with the guitar reaches the door.", "visual_elements": [ { "content": "Then, the camera cuts to a scene at the door.", "type": "camera", "weight": 3 }, { "content": "The woman walks to the door, opens it, and stands beside it.", "type": "action", "weight": 3 }, { "content": "The light from outside shines in, revealing a white wall outside with a black door lock on it.", "type": "scene", "weight": 1 }, { "content": "As the woman opens the door, a man in a black jacket enters from the right side of the scene, carrying a guitar.", "type": "action", "weight": 3 }, { "content": "Behind the man with the guitar is another man wearing a red and gray shirt.", "type": "action", "weight": 3 }, { "content": "The woman steps aside to let the two men pass through the door.", "type": "action", "weight": 3 }, { "content": "The video stops as the man with the guitar reaches the door.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0318", "video_path": "VELOCITI/v_NXwxYIjqocA_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a woman. She’s wearing a floral outfit with a red and black skirt, her hair is red, and she has a pink headband on. Her hands are tied with a silk scarf, and her body is restrained by ropes. The woman tries to use her teeth to untie the scarf around her wrists. Suddenly, she stops and looks straight ahead.\nThe camera then cuts to a male character, dressed in a light-colored shirt and pants. He quickly runs up a metal staircase, takes a few steps, then quickly climbs another metal staircase. The camera follows his movements from the side as he ascends the stairs. Next, the shot changes to a character dressed as a little duck, wearing orange and red clothes. The duck runs a few steps before coming to a stop.\nThe camera cuts to the man and woman. The man approaches the woman, who is still tied to a raised platform. The background reveals a high-tech laboratory, filled with various instruments and control panels. The man unties the silk scarf around the woman's wrists. Once her hands are free, the woman moves the rope around her neck, loosening it.", "events": [ { "event": "The video begins with the camera focused on a woman. She’s wearing a floral outfit with a red and black skirt, her hair is red, and she has a pink headband on. Her hands are tied with a silk scarf, and her body is restrained by ropes. The woman tries to use her teeth to untie the scarf around her wrists. Suddenly, she stops and looks straight ahead.", "visual_elements": [ { "content": "The video begins with the camera focusing on a woman.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a floral top and a red-and-black skirt, with red hair and a pink headband.", "type": "attribute", "weight": 3 }, { "content": "The woman's hands are tied with a silk scarf.", "type": "attribute", "weight": 3 }, { "content": "The woman's body is also bound with rope.", "type": "attribute", "weight": 2 }, { "content": "The woman tries to untie the silk scarf binding her hands with her teeth.", "type": "action", "weight": 3 }, { "content": "Suddenly, the woman stops moving and looks ahead.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to a male character, dressed in a light-colored shirt and pants. 
He quickly runs up a metal staircase, takes a few steps, then quickly climbs another metal staircase. The camera follows his movements from the side as he ascends the stairs. Next, the shot changes to a character dressed as a little duck, wearing orange and red clothes. The duck runs a few steps before coming to a stop.", "visual_elements": [ { "content": "Then, the camera cuts to a man.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a light-colored shirt and pants.", "type": "attribute", "weight": 3 }, { "content": "The man quickly runs up a metal staircase, walks a few steps, then quickly runs up another metal staircase.", "type": "action", "weight": 3 }, { "content": "The camera follows the man's movement, capturing the process of him running up the staircase from the side.", "type": "camera", "weight": 3 }, { "content": "Then, the camera cuts to a character dressed in an orange and red outfit, resembling a little duck.", "type": "camera", "weight": 3 }, { "content": "The little duck runs a few steps and stops.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to the man and woman. The man approaches the woman, who is still tied to a raised platform. The background reveals a high-tech laboratory, filled with various instruments and control panels. The man unties the silk scarf around the woman's wrists. Once her hands are free, the woman moves the rope around her neck, loosening it.", "visual_elements": [ { "content": "The camera cuts to the man and the woman.", "type": "camera", "weight": 3 }, { "content": "The man comes to the woman's side.", "type": "attribute", "weight": 3 }, { "content": "The woman is still tied to the platform.", "type": "attribute", "weight": 3 }, { "content": "The background is a high-tech laboratory, with many instruments and control panels surrounding the area.", "type": "scene", "weight": 2 }, { "content": "The man unties the woman's silk scarf from her hands.", "type": "action", "weight": 3 }, { "content": "After freeing her hands, the woman moves the rope from around her neck.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0319", "video_path": "VELOCITI/v_NXwxYIjqocA_seg_155_165.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera begins by showing a massive creature, covered in spikes and tentacles. Its skin is a mix of flesh tones and red. The background features an industrial-style setting, with metal structures and pipes surrounding the creature.\nThe camera cuts to a man and a woman. The man is wearing a floral shirt and long pants, while the woman is dressed in a printed blouse and a skirt. Upon seeing the creature, both of them stop in their tracks.\nThe camera cuts back to the creature, whose eyes emit green beams of light, striking the man and woman. Green light spots appear on both of their chests, glowing and shimmering brightly.\nThe camera cuts again, showing the creature using its tentacles to wrap around a character dressed as a little duck, wearing colorful clothes.", "events": [ { "event": "The camera begins by showing a massive creature, covered in spikes and tentacles. Its skin is a mix of flesh tones and red. 
The background features an industrial-style setting, with metal structures and pipes surrounding the creature.", "visual_elements": [ { "content": "The camera starts by showing a huge monster.", "type": "camera", "weight": 3 }, { "content": "The monster is covered in spikes and tentacles.", "type": "attribute", "weight": 3 }, { "content": "The monster's skin is a mix of flesh-colored and red.", "type": "attribute", "weight": 3 }, { "content": "The background is an industrial-style setting, with metal structures and pipes all around.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a man and a woman. The man is wearing a floral shirt and long pants, while the woman is dressed in a printed blouse and a skirt. Upon seeing the creature, both of them stop in their tracks.", "visual_elements": [ { "content": "The camera cuts to a man and a woman.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a floral shirt and trousers.", "type": "attribute", "weight": 3 }, { "content": "The woman is wearing a printed shirt and a skirt.", "type": "attribute", "weight": 3 }, { "content": "The man and the woman stop after seeing the monster.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the creature, whose eyes emit green beams of light, striking the man and woman. Green light spots appear on both of their chests, glowing and shimmering brightly.", "visual_elements": [ { "content": "The camera cuts back to the monster.", "type": "camera", "weight": 3 }, { "content": "The monster's eyes emit green beams of light.", "type": "action", "weight": 3 }, { "content": "The green light strikes the man and the woman.", "type": "action", "weight": 3 }, { "content": "Green light spots appear on the chest of both the man and the woman.", "type": "attribute", "weight": 2 }, { "content": "The green light spots sparkle.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts again, showing the creature using its tentacles to wrap around a character dressed as a little duck, wearing colorful clothes.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "The monster uses its tentacles to wrap around a character dressed in colorful clothes, resembling a little duck.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 15 }, { "index": "TUNA_0320", "video_path": "VELOCITI/v_NmNveyfhpBg_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on the exterior wall of a house. The wall is made of light-colored wood panels, and in the background, you can see some greenery and a wooden fence.\nThe scene then shifts to the right as a young man, dressed in a brown top and wearing a light-colored hat, walks backward out of the house from the right side of the scene. He’s holding a blue plastic box filled with aluminum cans. The scene pans left as the young man turns while holding the box, startled by a man entering from the left side of the scene. The young man quickly throws the box to the newcomer, who is wearing a dark jacket. The scene shifts right, and the young man swiftly turns around and rushes back into the house.\nA few seconds later, the young man walks out of the house again. The scene shifts left as the young man approaches the man in the dark jacket and speaks to him. 
The man in the dark jacket first glances to his right, then reaches out, covering the young man’s mouth with his hand. With one hand covering the young man's mouth and the other pressing on his neck, the man in the dark jacket begins to drag the young man toward the side of the house.", "events": [ { "event": "The video begins with the camera focused on the exterior wall of a house. The wall is made of light-colored wood panels, and in the background, you can see some greenery and a wooden fence.", "visual_elements": [ { "content": "The video begins with the camera focused on the exterior wall of a house.", "type": "camera", "weight": 3 }, { "content": "The wall is made of light-colored wood panels.", "type": "scene", "weight": 1 }, { "content": "In the background, some green plants and a wooden fence are visible.", "type": "scene", "weight": 1 } ] }, { "event": "The scene then shifts to the right as a young man, dressed in a brown top and wearing a light-colored hat, walks backward out of the house from the right side of the scene. He’s holding a blue plastic box filled with aluminum cans. The scene pans left as the young man turns while holding the box, startled by a man entering from the left side of the scene. The young man quickly throws the box to the newcomer, who is wearing a dark jacket. The scene shifts right, and the young man swiftly turns around and rushes back into the house.", "visual_elements": [ { "content": "Then, a young man wearing a brown shirt and a light-colored hat walks backwards out of the house from the right side of the scene.", "type": "action", "weight": 3 }, { "content": "He is holding a blue plastic box, filled with aluminum cans.", "type": "attribute", "weight": 3 }, { "content": "The camera pans left.", "type": "camera", "weight": 3 }, { "content": "The young man turns around while holding the box and is startled by a man entering from the left side of the scene.", "type": "action", "weight": 3 }, { "content": "The young man throws the box in his hands to the man who just entered the scene.", "type": "action", "weight": 3 }, { "content": "The man who just entered the scene is wearing a dark jacket.", "type": "attribute", "weight": 2 }, { "content": "The camera pans right.", "type": "camera", "weight": 3 }, { "content": "Then, the young man quickly turns and goes back into the house.", "type": "action", "weight": 3 } ] }, { "event": "A few seconds later, the young man walks out of the house again. The scene shifts left as the young man approaches the man in the dark jacket and speaks to him. The man in the dark jacket first glances to his right, then reaches out, covering the young man’s mouth with his hand. 
With one hand covering the young man's mouth and the other pressing on his neck, the man in the dark jacket begins to drag the young man toward the side of the house.", "visual_elements": [ { "content": "A few seconds later, the young man walks out of the house again.", "type": "action", "weight": 3 }, { "content": "The camera pans left.", "type": "camera", "weight": 3 }, { "content": "The young man speaks to the man in the dark jacket.", "type": "action", "weight": 3 }, { "content": "The man in the dark jacket looks to his right, then reaches out and covers the young man's mouth.", "type": "action", "weight": 3 }, { "content": "The man in the dark jacket covers the young man's mouth with one hand and presses on his neck with the other, about to drag him to the side of the house.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0321", "video_path": "VELOCITI/v_ORrQKFliVLM_seg_80_90.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on an orange ferry, with a vast stretch of water and a distant city skyline in the background. Two characters appear in the scene. On the right side of the scene, there’s a figure dressed in a red and blue tight suit, with their head covered by a red mask, leaving only white eye holes visible. On the left side of the scene is another character, a Black man wearing a dark jacket and yellow cuffs on his sleeves.\nThe scene cuts to reveal a character wearing a metal helmet, with glowing green eyes. The character also has mechanical wings attached to their back.\nThe scene cuts again, showing the character with mechanical wings flying through the air towards the orange ferry. On the ferry's deck, the figure in the tight suit, the Black man, and several other people can be seen.\nThe scene cuts again, showing the Black man running toward the other side of the deck. The figure in the tight suit extends his wrist and fires a white web-like thread at the Black man. The web is abruptly cut, sparking with electric flashes.\nThe scene cuts again to the character with mechanical wings flying through the air, with a sky filled with thick, swirling clouds in the background.\nThe scene quickly shifts to the figure in the tight suit, with a backdrop of silver cars. The cars are arranged in two rows, lining both sides of the ferry's aisle. The character in the tight-fitting clothing shoots out white spider's silk.\nThe camera cuts to a character with mechanical wings. While flying in the sky, one of the wings gets tangled in white spider silk.\nThe camera cuts to a figure in a tight suit on the deck. The figure pulls on the spider silk with their hands, tugging it forcefully.\nThe camera cuts back to the character with mechanical wings. While flying in the air, they become entangled in the white spider silk. Sparks fly from their body as they're hit by gunfire.\nThe camera cuts again. In the foreground, the character with mechanical wings is flying through the air, while in the background, an orange ferry appears. On the second level of the ferry, three men are holding guns and shooting at the character with mechanical wings.", "events": [ { "event": "The video begins with the camera focused on an orange ferry, with a vast stretch of water and a distant city skyline in the background. Two characters appear in the scene. 
On the right side of the scene, there’s a figure dressed in a red and blue tight suit, with their head covered by a red mask, leaving only white eye holes visible. On the left side of the scene is another character, a Black man wearing a dark jacket and yellow cuffs on his sleeves.", "visual_elements": [ { "content": "The video begins with the camera focused on an orange ferry.", "type": "camera", "weight": 3 }, { "content": "The background is an open water surface with the city skyline in the distance.", "type": "scene", "weight": 2 }, { "content": "Two people appear in the scene.", "type": "scene", "weight": 2 }, { "content": "A character in a red and blue tight-fitting suit appears on the right side of the scene.", "type": "attribute", "weight": 3 }, { "content": "The character in the tight-fitting suit has their head wrapped in a red mask, with white eyes.", "type": "attribute", "weight": 3 }, { "content": "On the left side of the scene is another Black man wearing a dark jacket.", "type": "attribute", "weight": 3 }, { "content": "The Black man is wearing yellow cuffs.", "type": "attribute", "weight": 3 } ] }, { "event": "The scene cuts to reveal a character wearing a metal helmet, with glowing green eyes. The character also has mechanical wings attached to their back.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "a character wearing a metal helmet, with glowing green eyes. The character also has mechanical wings attached to their back.", "type": "attribute", "weight": 3 } ] }, { "event": "The scene cuts again, showing the character with mechanical wings flying through the air towards the orange ferry. On the ferry's deck, the figure in the tight suit, the Black man, and several other people can be seen.", "visual_elements": [ { "content": "The scene cuts again.", "type": "camera", "weight": 3 }, { "content": "The scene cuts again, showing the character with mechanical wings flying through the air towards the orange ferry.", "type": "action", "weight": 3 }, { "content": "On the ferry's deck, the figure in the tight suit, the Black man, and several other people can be seen.", "type": "attribute", "weight": 2 } ] }, { "event": "The scene cuts again, showing the Black man running toward the other side of the deck. The figure in the tight suit extends his wrist and fires a white web-like thread at the Black man. 
The web is abruptly cut, sparking with electric flashes.", "visual_elements": [ { "content": "The scene cuts again.", "type": "camera", "weight": 3 }, { "content": "The Black man runs toward the other side of the deck.", "type": "action", "weight": 3 }, { "content": "The person in the tight suit extends their wrist and shoots white spider silk toward the Black man.", "type": "action", "weight": 3 }, { "content": "The white spider silk is abruptly cut, generating sparks of electricity.", "type": "action", "weight": 2 } ] }, { "event": "The scene cuts again to the character with mechanical wings flying through the air, with a sky filled with thick, swirling clouds in the background.", "visual_elements": [ { "content": "The camera cuts again to the character with mechanical wings.", "type": "camera", "weight": 3 }, { "content": "The character with mechanical wings is flying in the air.", "type": "action", "weight": 3 }, { "content": "The background is a sky filled with dense clouds.", "type": "scene", "weight": 2 } ] }, { "event": "The scene quickly shifts to the figure in the tight suit, with a backdrop of silver cars. The cars are arranged in two rows, lining both sides of the ferry's aisle. The character in the tight-fitting clothing shoots out white spider's silk.", "visual_elements": [ { "content": "The camera quickly pans to the person in the tight suit.", "type": "camera", "weight": 3 }, { "content": "Behind the person in the tight suit, there are several silver cars, arranged in two rows on either side of a ferry's walkway.", "type": "scene", "weight": 1 }, { "content": "The person in the tight suit fires another burst of white spider silk.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a character with mechanical wings. While flying in the sky, one of the wings gets tangled in white spider silk.", "visual_elements": [ { "content": "The camera pans to the character with mechanical wings again.", "type": "camera", "weight": 3 }, { "content": "While flying in the sky, the character with mechanical wings gets one of their wings tangled in the white spider silk.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a figure in a tight suit on the deck. The figure pulls on the spider silk with their hands, tugging it forcefully.", "visual_elements": [ { "content": "The camera cuts to the person in the tight suit on the deck.", "type": "camera", "weight": 3 }, { "content": "The person in the tight suit grabs the spider silk with their hand and pulls it forcefully.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the character with mechanical wings. While flying in the air, they become entangled in the white spider silk. Sparks fly from their body as they're hit by gunfire.", "visual_elements": [ { "content": "The camera pans again to the character with mechanical wings.", "type": "camera", "weight": 3 }, { "content": "The character with mechanical wings is flying in the air, now completely entangled in the white spider silk.", "type": "action", "weight": 3 }, { "content": "Sparks appear on the body of the character with mechanical wings, as if they have been shot.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts again. In the foreground, the character with mechanical wings is flying through the air, while in the background, an orange ferry appears. 
On the second level of the ferry, three men are holding guns and shooting at the character with mechanical wings.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "In the foreground, the character with mechanical wings is seen flying in the air.", "type": "scene", "weight": 2 }, { "content": "In the background, there is an orange ferry.", "type": "scene", "weight": 2 }, { "content": "On the second level of the ferry, three men are holding guns and shooting at the character with mechanical wings.", "type": "action", "weight": 2 } ] } ], "n_events": 10, "n_elements": 33 }, { "index": "TUNA_0322", "video_path": "VELOCITI/v_Oc2xTMnIwrI_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video starts with the camera focused on an adult male wearing a light-colored long-sleeve shirt, standing in a dimly lit room. In the background, the iron bars of a window are visible, with light filtering in from outside.\nThe camera then cuts to a young boy sitting on the other side of the room. He wears a white baseball jersey with \"BEARS\" printed on it, and the jersey has yellow stripes along the edges. The boy's hair is golden and shoulder-length. The background features a green wall. The boy stands up.\nThe camera then cuts back to the adult male, who is still standing in the same spot. He begins to speak.\nThe camera then cuts to a shot from behind the boy, who is bending down to gather his things. On the back of his baseball jersey, the words \"CHICO'S BAIL BONDS\" and \"Let Freedom Ring\" are printed. The boy throws his baseball cap to the side and turns his head to speak toward his left, moving toward the door as he talks. The camera follows his movement, shifting slightly to the right. Outside the door, there is grass and a distant building in the background.", "events": [ { "event": "The video starts with the camera focused on an adult male wearing a light-colored long-sleeve shirt, standing in a dimly lit room. In the background, the iron bars of a window are visible, with light filtering in from outside.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on an adult male.", "type": "camera", "weight": 3 }, { "content": "The adult male is wearing a light-colored long-sleeve shirt and standing in a dimly lit room.", "type": "attribute", "weight": 3 }, { "content": "In the background, iron bars can be seen on the window, with light filtering in from outside.", "type": "action", "weight": 2 } ] }, { "event": "The camera then cuts to a young boy sitting on the other side of the room. He wears a white baseball jersey with \"BEARS\" printed on it, and the jersey has yellow stripes along the edges. The boy's hair is golden and shoulder-length. The background features a green wall. 
The boy stands up.", "visual_elements": [ { "content": "Then, the camera cuts to a young boy.", "type": "camera", "weight": 3 }, { "content": "The young boy wears a white baseball jersey with the word \"BEARS\" printed on it, and the shirt has yellow decorative stripes.", "type": "attribute", "weight": 3 }, { "content": "The boy has golden hair that reaches his shoulders.", "type": "attribute", "weight": 3 }, { "content": "The background features a green wall.", "type": "scene", "weight": 1 }, { "content": "The boy stands up.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts back to the adult male, who is still standing in the same spot. He begins to speak.", "visual_elements": [ { "content": "The camera then pans back to the adult male.", "type": "camera", "weight": 3 }, { "content": "The male begins to speak.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to a shot from behind the boy, who is bending down to gather his things. On the back of his baseball jersey, the words \"CHICO'S BAIL BONDS\" and \"Let Freedom Ring\" are printed. The boy throws his baseball cap to the side and turns his head to speak toward his left, moving toward the door as he talks. The camera follows his movement, shifting slightly to the right. Outside the door, there is grass and a distant building in the background.", "visual_elements": [ { "content": "The camera then shifts to the boy's back.", "type": "scene", "weight": 3 }, { "content": "The boy is bent over, packing things up.", "type": "action", "weight": 3 }, { "content": "The back of the boy's baseball jersey has the words \"CHICO'S BAIL BONDS\" and \"Let Freedom Ring\" printed on it.", "type": "attribute", "weight": 3 }, { "content": "The boy throws the baseball cap from his hand and turns his head to speak toward his left, behind him.", "type": "action", "weight": 3 }, { "content": "As the boy speaks, he moves towards the door.", "type": "action", "weight": 3 }, { "content": "The camera follows the boy's movement, shifting to the right.", "type": "camera", "weight": 3 }, { "content": "Outside, the background features a grassy area and distant buildings.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 17 }, { "index": "TUNA_0323", "video_path": "VELOCITI/v_OinSJLNXgA0_seg_35_45.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with two people skiing on a snowy slope. The person on the left is wearing a black ski suit, while the person on the right is dressed in a pink and black ski suit, with an orange hat and goggles. The person in the black ski suit is holding ski poles, and both are wearing skis. The person on the right places their left hand on the left person’s shoulder to maintain balance.\nThe camera then cuts to their feet, showing their skis gliding across the snow.\nThe camera cuts back to the upper bodies of the two people. The person on the right continues to keep their hand on the left person’s shoulder, trying to maintain balance. They exchange a few words with each other. In the background, some trees and other skiers can be seen.\nThe camera cuts again to their feet, where their skis are crossed over each other.\nThe camera cuts to the upper bodies of the two. The person in the pink ski suit is tightly holding onto the person in the black ski suit. 
The person in the black ski suit looks terrified.", "events": [ { "event": "The video begins with two people skiing on a snowy slope. The person on the left is wearing a black ski suit, while the person on the right is dressed in a pink and black ski suit, with an orange hat and goggles. The person in the black ski suit is holding ski poles, and both are wearing skis. The person on the right places their left hand on the left person’s shoulder to maintain balance.", "visual_elements": [ { "content": "At the beginning of the video, two people are skiing on the snow in the scene.", "type": "camera", "weight": 3 }, { "content": "The person on the left side of the scene is wearing a black ski suit and black goggles.", "type": "attribute", "weight": 3 }, { "content": "The person on the right is wearing a pink and black ski suit, an orange hat, and black-and-white goggles.", "type": "attribute", "weight": 3 }, { "content": "The person in the black ski suit is holding ski poles.", "type": "attribute", "weight": 3 }, { "content": "Both individuals are wearing skis on their feet.", "type": "attribute", "weight": 3 }, { "content": "The person on the right places their left hand on the person on the left to maintain balance.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to their feet, showing their skis gliding across the snow.", "visual_elements": [ { "content": "The camera then pans to focus on their feet.", "type": "camera", "weight": 3 }, { "content": "The skis are gliding on the snow.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the upper bodies of the two people. The person on the right continues to keep their hand on the left person's shoulder, trying to maintain balance. They exchange a few words with each other. In the background, some trees and other skiers can be seen.", "visual_elements": [ { "content": "The camera pans back to focus on the upper bodies of the two individuals.", "type": "camera", "weight": 3 }, { "content": "The person on the right continues to place their hand on the person on the left, trying to maintain balance.", "type": "action", "weight": 3 }, { "content": "The two individuals exchange words with each other.", "type": "action", "weight": 3 }, { "content": "In the background, there are trees and other skiers visible.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts again to their feet, where their skis are crossed over each other.", "visual_elements": [ { "content": "The camera cuts again to focus on their feet again.", "type": "camera", "weight": 3 }, { "content": "The skis of the two individuals are crossed over each other.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the upper bodies of the two. The person in the pink ski suit is tightly holding onto the person in the black ski suit. 
The person in the black ski suit looks terrified.", "visual_elements": [ { "content": "The camera cuts back to focus on the upper bodies of the two individuals.", "type": "camera", "weight": 3 }, { "content": "The person in the pink ski suit tightly hugs the person in the black ski suit around the upper body.", "type": "attribute", "weight": 3 }, { "content": "The person in the black ski suit looks extremely terrified.", "type": "attribute", "weight": 2 } ] } ], "n_events": 5, "n_elements": 17 }, { "index": "TUNA_0324", "video_path": "VELOCITI/v_OinSJLNXgA0_seg_45_55.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a snowy landscape, with tall pine trees and distant mountains in the background. Several people are skiing across the snow. In the center of the frame, two skiers are embracing each other, and the camera moves forward as they ski. The skier with their back to the camera is wearing a black ski suit, while the person facing them is dressed in a pink ski suit. The two are tightly holding each other as they ski forward.\nThe camera then cuts to other skiers. The two skiers who were embracing enter the frame from the right side, gliding down a snow-covered slope as they move forward.\nThe camera zooms in on their skis, with one skier's ski placed horizontally across the other skier's ski. Then, the shot shifts to a close-up of the two of them, still tightly embracing, as trees blur quickly in the background. Suddenly, the skier in the black ski suit falls to the snow, sending a spray of snowflakes into the air. The skier in the pink and black suit continues skiing forward, while other skiers watch them. Finally, the camera zooms in to focus on the skier in the black suit, who is lying face down in the snow.", "events": [ { "event": "The video begins with the camera focused on a snowy landscape, with tall pine trees and distant mountains in the background. Several people are skiing across the snow. In the center of the frame, two skiers are embracing each other, and the camera moves forward as they ski. The skier with their back to the camera is wearing a black ski suit, while the person facing them is dressed in a pink ski suit. The two are tightly holding each other as they ski forward.", "visual_elements": [ { "content": "The video begins with the camera focused on a snowy landscape.", "type": "camera", "weight": 3 }, { "content": "Tall pine trees and distant mountains are in the background.", "type": "scene", "weight": 2 }, { "content": "Several people are skiing across the snow in the frame.", "type": "scene", "weight": 2 }, { "content": "In the center of the frame, two skiers are embracing each other.", "type": "attribute", "weight": 3 }, { "content": "The camera moves forward as they ski.", "type": "camera", "weight": 3 }, { "content": "The skier with their back to the camera is wearing a black ski suit.", "type": "attribute", "weight": 3 }, { "content": "The person in the black ski suit and the person in the pink ski suit are hugging each other.", "type": "attribute", "weight": 2 }, { "content": "The two are tightly holding each other as they ski forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to other skiers. 
The two skiers who were embracing enter the frame from the right side, gliding down a snow-covered slope as they move forward.", "visual_elements": [ { "content": "The camera then cuts to other skiers.", "type": "camera", "weight": 3 }, { "content": "The two skiers who were embracing enter the frame from the right side.", "type": "camera", "weight": 3 }, { "content": "Both of them glide down a snow-covered slope as they move forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera zooms in on their skis, with one skier's ski placed horizontally across the other skier's ski. Then, the shot shifts to a close-up of the two of them, still tightly embracing, as trees blur quickly in the background. Suddenly, the skier in the black ski suit falls to the snow, sending a spray of snowflakes into the air. The skier in the pink and black suit continues skiing forward, while other skiers watch them. Finally, the camera zooms in to focus on the skier in the black suit, who is lying face down in the snow.", "visual_elements": [ { "content": "The camera zooms in on their skis.", "type": "camera", "weight": 3 }, { "content": "One skier's ski is placed horizontally across the other skier's ski.", "type": "attribute", "weight": 2 }, { "content": "Then, the shot shifts to a close-up of the two of them.", "type": "camera", "weight": 3 }, { "content": "The two of them are still tightly embracing each other.", "type": "action", "weight": 3 }, { "content": "Trees blur quickly in the background.", "type": "action", "weight": 2 }, { "content": "Suddenly, the skier in the black ski suit falls to the snow, sending a spray of snowflakes into the air.", "type": "action", "weight": 3 }, { "content": "The skier in the pink and black suit continues skiing forward.", "type": "action", "weight": 2 }, { "content": "Other skiers in the surroundings watch both of them.", "type": "scene", "weight": 1 }, { "content": "Finally, the camera zooms in to focus on the skier in the black suit, who is lying face down in the snow.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0325", "video_path": "VELOCITI/v_OinSJLNXgA0_seg_60_70.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a man in a black ski suit, lying face down in the snow, holding ski poles in his hands. His hair is curly, and he is wearing goggles and black gloves. Drool is dripping from his mouth as he speaks. The man is wearing skis on both feet. He tries to get up from the ground, pushing his hands against the snow, struggling to stand.\nThe camera then cuts to another skier, a woman in a vibrant ski outfit. She is wearing an orange hat and goggles, with a pink and purple ski suit. The woman turns her head to speak behind her, then quickly looks back ahead. In the background, tall pine trees are visible, and sunlight filters through the leaves, casting a warm glow on the snow.\nThe camera then zooms in on the skier's skis, which leave a trail of marks in the snow as they glide forward, with snowflakes flying up around them.\nThe camera cuts again to a wide shot of the ski resort, showing several skiers gliding down the slopes. The skiers move quickly across the frame, with a dense forest of pine trees in the background.\nThe camera cuts back to the man in the black ski suit as he continues skiing. 
It follows his movement, capturing him closely trailing behind the woman in the vibrant ski outfit.", "events": [ { "event": "The video begins with the camera focused on a man in a black ski suit, lying face down in the snow, holding ski poles in his hands. His hair is curly, and he is wearing goggles and black gloves. Drool is dripping from his mouth as he speaks. The man is wearing skis on both feet. He tries to get up from the ground, pushing his hands against the snow, struggling to stand.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on a man wearing a black ski suit.", "type": "camera", "weight": 3 }, { "content": "The man in the black ski suit is lying on the snow, holding ski poles in his hands.", "type": "attribute", "weight": 2 }, { "content": "The man has curly hair and is wearing goggles and black gloves.", "type": "attribute", "weight": 2 }, { "content": "The man in the black skiing outfit is drooling while talking.", "type": "action", "weight": 3 }, { "content": "The man in the black skiing outfit has two skis attached to his feet.", "type": "attribute", "weight": 2 }, { "content": "The man in the black skiing outfit uses his hands to push against the ground, struggling to stand up.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to another skier, a woman in a vibrant ski outfit. She is wearing an orange hat and goggles, with a pink and purple ski suit. The woman turns her head to speak behind her, then quickly looks back ahead. In the background, tall pine trees are visible, and sunlight filters through the leaves, casting a warm glow on the snow.", "visual_elements": [ { "content": "The camera then cuts to another skier, a woman in a vibrant ski outfit.", "type": "camera", "weight": 3 }, { "content": "The woman in the bright skiing outfit is wearing an orange hat and goggles, along with a skiing suit that alternates between pink and purple colors.", "type": "attribute", "weight": 2 }, { "content": "The woman in the bright skiing outfit is currently skiing.", "type": "action", "weight": 3 }, { "content": "The woman in the bright skiing outfit turns her head to speak to someone behind her, then turns back to look ahead.", "type": "action", "weight": 3 }, { "content": "In the background, tall pine trees can be seen, with sunlight filtering through the leaves and casting spots on the snowy ground.", "type": "scene", "weight": 1 } ] }, { "event": "The camera then zooms in on the skier's skis, which leave a trail of marks in the snow as they glide forward, with snowflakes flying up around them.", "visual_elements": [ { "content": "Subsequently, the camera zooms in on the skier's skis.", "type": "camera", "weight": 3 }, { "content": "The skis carve tracks in the snow.", "type": "action", "weight": 2 }, { "content": "Snowflakes fly up in all directions.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera cuts again to a wide shot of the ski resort, showing several skiers gliding down the slopes. 
The skiers move quickly across the frame, with a dense forest of pine trees in the background.", "visual_elements": [ { "content": "The camera cuts again, showing a panoramic view of the ski resort.", "type": "camera", "weight": 3 }, { "content": "Several skiers glide down the snow-covered slopes, quickly passing by the camera lens.", "type": "action", "weight": 2 }, { "content": "The background is a dense pine forest.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts back to the man in the black ski suit as he continues skiing. It follows his movement, capturing him closely trailing behind the woman in the vibrant ski outfit.", "visual_elements": [ { "content": "The camera once again returns to the man in the black skiing outfit.", "type": "camera", "weight": 3 }, { "content": "The man in the black skiing outfit continues to ski.", "type": "action", "weight": 3 }, { "content": "The camera follows the back of the man in the black skiing outfit.", "type": "camera", "weight": 3 }, { "content": "The woman in the bright skiing outfit skis ahead of the man in the black skiing outfit.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 21 }, { "index": "TUNA_0326", "video_path": "VELOCITI/v_OkphsYRRJ_0_seg_85_95.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a woman wearing a red suit with a low neckline. Her hair is short and curly. She stands behind a table, leaning slightly forward, with both hands resting on the surface as she engages in conversation. She then pulls out a yellow chair and sits down. The table is cluttered with various office supplies and decorative items. On the wall behind her, several modern art-style paintings hang, with sculptures and other decorative objects placed nearby. The room’s overall color scheme is dominated by yellow.\nThe camera cuts to another woman, dressed in a black uniform, holding a red hat in her hands. She struggles to brace her back against the door, preventing anyone from entering. After a fall, she gets back up, continuing to speak with the woman in the red suit. On either side of the door, two posters of muscular men in pink hues are pinned to the walls.\nThe camera cuts back to the woman in the red suit. She is sitting in the chair behind the desk, picking up the phone from the table.", "events": [ { "event": "The video begins with the camera focused on a woman wearing a red suit with a low neckline. Her hair is short and curly. She stands behind a table, leaning slightly forward, with both hands resting on the surface as she engages in conversation. She then pulls out a yellow chair and sits down. The table is cluttered with various office supplies and decorative items. On the wall behind her, several modern art-style paintings hang, with sculptures and other decorative objects placed nearby. 
The room’s overall color scheme is dominated by yellow.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on a woman wearing a red suit.", "type": "camera", "weight": 3 }, { "content": "The woman in the red suit has a low neckline, and her hair is short and curly.", "type": "attribute", "weight": 2 }, { "content": "She stands behind a desk, leaning slightly forward.", "type": "action", "weight": 3 }, { "content": "With her hands braced on the desk, she is engaged in conversation with someone.", "type": "action", "weight": 3 }, { "content": "The woman in the red suit then pulls a yellow chair next to her and sits down.", "type": "action", "weight": 3 }, { "content": "The desk is cluttered with various office supplies and decorations.", "type": "attribute", "weight": 1 }, { "content": "On the background wall, several modern art paintings hang, and there are also sculptures and decorative objects along the wall.", "type": "scene", "weight": 1 }, { "content": "The overall color tone of the room is yellow.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts to another woman, dressed in a black uniform, holding a red hat in her hands. She struggles to brace her back against the door, preventing anyone from entering. After a fall, she gets back up, continuing to speak with the woman in the red suit. On either side of the door, two posters of muscular men in pink hues are pinned to the walls.", "visual_elements": [ { "content": "The camera cuts to another woman.", "type": "camera", "weight": 3 }, { "content": "The other woman wears a black uniform and holds a black and red hat in her hand.", "type": "attribute", "weight": 2 }, { "content": "The woman in the black uniform uses her back to push against the door, preventing someone from coming in from outside.", "type": "action", "weight": 3 }, { "content": "The woman in the black uniform falls down but quickly stands up again.", "type": "action", "weight": 3 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The woman in the black uniform keeps talking to someone in front of her.", "type": "action", "weight": 3 }, { "content": "On the walls on both sides of the door, two pink posters of male bodybuilders are pasted.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts back to the woman in the red suit. She is sitting in the chair behind the desk, picking up the phone from the table.", "visual_elements": [ { "content": "The camera cuts back to the woman in the red suit.", "type": "camera", "weight": 3 }, { "content": "The woman in the red suit sits in the chair behind the desk.", "type": "attribute", "weight": 2 }, { "content": "The woman in the red suit picks up the phone from the table.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0327", "video_path": "VELOCITI/v_PBaQezez_UU_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a man sitting at a table. He is wearing a dark-colored shirt, and in the background, there is a dim light casting a soft glow.\nThe camera then cuts to an old wooden door, its surface worn with peeling paint and iron bars. Nearby, there are some tools and miscellaneous items scattered around. A woman in a white long dress enters, pushing the door open. 
She has a backpack slung over her shoulder, and her long yellow hair falls loosely around her shoulders. She then closes the door behind her.\nThe camera cuts back to the man, who still sits at the table. He has turned his head to the other side.\nThe camera cuts to the woman as she continues walking into the room. In the background, some sheet music is visible. She approaches a piano, removes the backpack from her shoulder, and sets it down.\nThe camera cuts back to the man, whose head turns toward another woman standing beside him. She is wearing a white top with a bow tie at the collar, a skirt, and an apron, holding a cloth bag in her hands. She places the bag on the table and begins to tidy up the cups. The man remains seated, his gaze returning to the cloth in his hands.\nThe camera cuts back to the woman in the white long dress as she turns and walks to the other side of the room.", "events": [ { "event": "The video begins with the camera focused on a man sitting at a table. He is wearing a dark-colored shirt, and in the background, there is a dim light casting a soft glow.", "visual_elements": [ { "content": "The video begins with the camera focused on a man sitting at a table.", "type": "camera", "weight": 3 }, { "content": "The man wears a dark-colored shirt.", "type": "attribute", "weight": 2 }, { "content": "In the background, there is a dim light.", "type": "scene", "weight": 1 } ] }, { "event": "The camera then cuts to an old wooden door, its surface worn with peeling paint and iron bars. Nearby, there are some tools and miscellaneous items scattered around. A woman in a white long dress enters, pushing the door open. She has a backpack slung over her shoulder, and her long yellow hair falls loosely around her shoulders. She then closes the door behind her.", "visual_elements": [ { "content": "The camera then cuts to an old wooden door.", "type": "camera", "weight": 3 }, { "content": "There are iron bars and peeling paint on the door.", "type": "attribute", "weight": 2 }, { "content": "There are some tools and sundries.", "type": "attribute", "weight": 2 }, { "content": "Then, a woman wearing a long white dress pushes the door open and walks in.", "type": "action", "weight": 3 }, { "content": "The woman in the long white dress carries a backpack on her shoulder, and her long yellow hair cascades over it.", "type": "attribute", "weight": 2 }, { "content": "Then, the woman in the long white dress closes the door.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the man, who still sits at the table. He has turned his head to the other side.", "visual_elements": [ { "content": "The camera cuts back to the man again.", "type": "camera", "weight": 3 }, { "content": "The man still sits at the table.", "type": "attribute", "weight": 2 }, { "content": "The man turns his head to the other side.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the woman as she continues walking into the room. In the background, some sheet music is visible. 
She approaches a piano, removes the backpack from her shoulder, and sets it down.", "visual_elements": [ { "content": "Then the camera cuts to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman in the long white dress continues to walk into the room.", "type": "action", "weight": 3 }, { "content": "In the background, some music sheets can be seen.", "type": "scene", "weight": 1 }, { "content": "A woman in a white long dress walks over to a piano and takes her backpack off her shoulder.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the man, whose head turns toward another woman standing beside him. She is wearing a white top with a bow tie at the collar, a skirt, and an apron, holding a cloth bag in her hands. She places the bag on the table and begins to tidy up the cups. The man remains seated, his gaze returning to the cloth in his hands.", "visual_elements": [ { "content": "The camera cuts back to the man.", "type": "camera", "weight": 3 }, { "content": "The man turns his head towards another woman beside him.", "type": "action", "weight": 3 }, { "content": "The woman wears a white top with a bow tie at the collar, a skirt, and an apron.", "type": "attribute", "weight": 2 }, { "content": "The man remains seated.", "type": "attribute", "weight": 2 }, { "content": "The woman in the apron is holding a cloth bag in her hand.", "type": "action", "weight": 3 }, { "content": "The man's gaze returns to the cloth in his hands.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts back to the woman in the white long dress as she turns and walks to the other side of the room.", "visual_elements": [ { "content": "The camera cuts back to the woman in the white long dress.", "type": "camera", "weight": 3 }, { "content": "The woman in the white long dress turns and walks to the other side of the room.", "type": "action", "weight": 2 } ] } ], "n_events": 6, "n_elements": 24 }, { "index": "TUNA_0328", "video_path": "VELOCITI/v_PLOSA5L0dxE_seg_5_15.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a young man sitting in a chair. He wears a white shirt.\nThe camera slightly moves upward, revealing another man standing in front of him. This man is wearing a blue shirt and black pants, holding a piece of paper in his hand.\nThe camera cuts to the man in the blue shirt as he walks to the desk and sits down. While placing the paper from his hand onto the desk, he pulls out his phone from his waist. He glances at the screen, then closes the phone and puts it in the drawer. He presses his left hand down on the paper on the desk.\nThe camera cuts back to the young man, who is still sitting in the chair, his expression serious.\nThe camera then shifts to the man in the blue shirt. He looks at the paper on the desk and takes a deep breath.", "events": [ { "event": "The video begins with the camera focused on a young man sitting in a chair. He wears a white shirt.", "visual_elements": [ { "content": "The video begins with the camera focused on a young man sitting in a chair.", "type": "camera", "weight": 3 }, { "content": "The man wears a white shirt.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera slightly moves upward, revealing another man standing in front of him. 
This man is wearing a blue shirt and black pants, holding a piece of paper in his hand.", "visual_elements": [ { "content": "The camera slightly moves upward, revealing another man standing in front of him.", "type": "camera", "weight": 3 }, { "content": "This man is wearing a blue shirt and black pants.", "type": "attribute", "weight": 2 }, { "content": "He is holding a piece of paper in his hand.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to the man in the blue shirt as he walks to the desk and sits down. While placing the paper from his hand onto the desk, he pulls out his phone from his waist. He glances at the screen, then closes the phone and puts it in the drawer. He presses his left hand down on the paper on the desk.", "visual_elements": [ { "content": "The camera cuts to the man in the blue shirt.", "type": "camera", "weight": 3 }, { "content": "The man in the blue shirt walks to the desk and sits down.", "type": "action", "weight": 3 }, { "content": "The man in the blue shirt pulls out his phone from his pocket, while placing the paper he was holding onto the desk.", "type": "action", "weight": 3 }, { "content": "The man in the blue shirt first glances at his phone after opening it, and then closes it and puts it into a drawer.", "type": "action", "weight": 3 }, { "content": "The man in the blue shirt presses his left hand down on the paper on the desk.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the young man, who is still sitting in the chair, his expression serious.", "visual_elements": [ { "content": "Then the camera cuts back to the young man.", "type": "camera", "weight": 3 }, { "content": "The young man remains seated in the chair, with a serious expression on his face.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera then shifts to the man in the blue shirt. He looks at the paper on the desk and takes a deep breath.", "visual_elements": [ { "content": "The camera then cuts to the man in the blue shirt.", "type": "camera", "weight": 3 }, { "content": "The man in the blue shirt looks at the paper on the desk and takes a deep breath.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 14 }, { "index": "TUNA_0329", "video_path": "VELOCITI/v_PSe5x7y-kVY_seg_55_65.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a young man wearing a sleeveless shirt, standing in front of a wooden door. In the background, a table lamp and some plants are visible.\nThe camera cuts to an older man, wearing a black suit, standing in front of a bookshelf. The shelf is lined with books and certificates. The older man turns his head to look at something in his hands.\nThe camera then pulls back to show both men standing in the center of the room. The older man bends down to place a document on the table, while the young man stands beside him, watching. After setting the item down, the older man straightens up. 
The red sofa and decorative paintings on the wall are clearly visible in the room, with soft, dim lighting creating a warm atmosphere.\nThe camera cuts to a close-up of the young man, who is crouching on the floor, looking up at the older man.\nThe camera cuts back to the older man, who is standing in the center of the room, continuing his conversation with the young man.\nThe camera cuts back to the young man, showing that he has now stood up and is speaking to the older man.\nThe camera cuts back to the older man, who looks up at the young man as he continues speaking.\nAt the end of the video, the camera zooms out again show the two men standing in the middle of the room.The two continue their conversation.The young man leans slightly forward and takes a step closer to the older manThe older man remains standing in place.", "events": [ { "event": "The video begins with the camera focused on a young man wearing a sleeveless shirt, standing in front of a wooden door. In the background, a table lamp and some plants are visible.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on the upper torso of a young man.", "type": "camera", "weight": 3 }, { "content": "The young man wears a sleeveless shirt.", "type": "attribute", "weight": 3 }, { "content": "In the background, a large door, a desk lamp, and some plants can be seen.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to an older man, wearing a black suit, standing in front of a bookshelf. The shelf is lined with books and certificates. The older man turns his head to look at something in his hands.", "visual_elements": [ { "content": "The camera cuts to an older man.", "type": "camera", "weight": 3 }, { "content": "The older man  wears a black suit, and stands in front of a bookshelf.", "type": "attribute", "weight": 2 }, { "content": "The shelf is lined with books and certificates.", "type": "attribute", "weight": 1 }, { "content": "The older man turns his head to look at something in his hands.", "type": "action", "weight": 3 } ] }, { "event": "The camera then pulls back to show both men standing in the center of the room. The older man bends down to place a document on the table, while the young man stands beside him, watching. After setting the item down, the older man straightens up. 
The red sofa and decorative paintings on the wall are clearly visible in the room, with soft, dim lighting creating a warm atmosphere.", "visual_elements": [ { "content": "The camera then zooms out, showing both men standing in the center of the room.", "type": "camera", "weight": 3 }, { "content": "The older man bends down to place a document on the table.", "type": "action", "weight": 3 }, { "content": "The young man stands aside, watching the older man.", "type": "attribute", "weight": 3 }, { "content": "After setting the item down, the older man straightens up.", "type": "action", "weight": 3 }, { "content": "The red sofa and decorative paintings on the wall are clearly visible in the room.", "type": "scene", "weight": 1 }, { "content": "The lighting is dim and soft.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts to a close-up of the young man, who is crouching on the floor, looking up at the older man.", "visual_elements": [ { "content": "The camera cuts to a close-up of the young man again.", "type": "camera", "weight": 3 }, { "content": "The young man crouches on the floor.", "type": "attribute", "weight": 2 }, { "content": "The young man looks up at the older man.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the older man, who is standing in the center of the room, continuing his conversation with the young man.", "visual_elements": [ { "content": "The camera cuts back to the older man.", "type": "camera", "weight": 3 }, { "content": "The older man stands in the center of the room.", "type": "attribute", "weight": 2 }, { "content": "The older man continues his conversation with the young man.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the young man, showing that he has now stood up and is speaking to the older man.", "visual_elements": [ { "content": "The camera cuts back to the young man.", "type": "camera", "weight": 3 }, { "content": "The young man has now stood up and is speaking to the older man.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the older man, who looks up at the young man as he continues speaking.", "visual_elements": [ { "content": "The camera cuts back to the older man.", "type": "camera", "weight": 3 }, { "content": "The older man looks up at the young man as he continues speaking.", "type": "action", "weight": 3 } ] }, { "event": "At the end of the video, the camera zooms out again to show the two men standing in the middle of the room. The two continue their conversation. The young man leans slightly forward and takes a step closer to the older man. The older man remains standing in place.", "visual_elements": [ { "content": "At the end of the video, the camera zooms out again to show the two men standing in the middle of the room.", "type": "camera", "weight": 3 }, { "content": "The two continue their conversation.", "type": "action", "weight": 2 }, { "content": "The young man leans slightly forward and takes a step closer to the older man.", "type": "action", "weight": 3 }, { "content": "The older man remains standing in place.", "type": "attribute", "weight": 2 } ] } ], "n_events": 8, "n_elements": 27 }, { "index": "TUNA_0330", "video_path": "VELOCITI/v_PuvONUFArdI_seg_115_125.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a half-open door with a 
light yellow hue, featuring a silver handle. Two children stand at the doorway: one wears a gray sweater and stands sideways, while the other is dressed in a dark vest over a white short-sleeved shirt. The child in the dark vest holds the door with one hand, standing just behind it.\nThe camera shifts slightly downwards as the child in the gray sweater takes a few steps forward, places a hand on the door handle, and peers into the room. The child in the dark vest lets go of the door, turns around, and walks towards the hallway outside. A woman dressed in a nurse's uniform passes by the doorway, and the child in the gray sweater nods after glancing inside the room.\nAt this moment, the camera cuts to the interior of the room, revealing a person lying in bed. This individual wears light-colored pajamas, resting their head on a pillow with their hand placed beside their head. They look towards the door, smiling and nodding.\nThe camera cuts back to the doorway, where the child in the gray sweater holds the door handle and steps back, gradually closing the door.\nOnce again, the camera cuts to the person in bed, who remains in the same position. The bed is covered with a patterned quilt.", "events": [ { "event": "At the beginning of the video, the camera focuses on a half-open door with a light yellow hue, featuring a silver handle. Two children stand at the doorway: one wears a gray sweater and stands sideways, while the other is dressed in a dark vest over a white short-sleeved shirt. The child in the dark vest holds the door with one hand, standing just behind it.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on a half-open door.", "type": "camera", "weight": 3 }, { "content": "The door has a light yellow hue, featuring a silver handle.", "type": "attribute", "weight": 1 }, { "content": "Two children stand at the doorway.", "type": "attribute", "weight": 3 }, { "content": "One child wears a gray sweater and stands sideways.", "type": "attribute", "weight": 2 }, { "content": "The child in the gray sweater stands sideways at the doorway.", "type": "attribute", "weight": 3 }, { "content": "The other child is wearing a dark-colored vest and a white short-sleeve shirt.", "type": "attribute", "weight": 2 }, { "content": "The child in the dark vest is holding the door with one hand, standing behind it.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera shifts slightly downwards as the child in the gray sweater takes a few steps forward, places a hand on the door handle, and peers into the room. The child in the dark vest lets go of the door, turns around, and walks towards the hallway outside. 
A woman dressed in a nurse's uniform passes by the doorway, and the child in the gray sweater nods after glancing inside the room.", "visual_elements": [ { "content": "The camera moves slightly downwards.", "type": "camera", "weight": 3 }, { "content": "The child in the gray sweater takes a few steps forward, holding the door handle and looking inside the room.", "type": "action", "weight": 3 }, { "content": "The child in the dark vest lets go of the handle and turns to walk towards the corridor outside the door.", "type": "action", "weight": 3 }, { "content": "A woman wearing a nurse's uniform passes by the door.", "type": "action", "weight": 3 }, { "content": "The child in the gray sweater looks inside the room and nods.", "type": "action", "weight": 3 } ] }, { "event": "At this moment, the camera cuts to the interior of the room, revealing a person lying in bed. This individual wears light-colored pajamas, resting their head on a pillow with their hand placed beside their head. They look towards the door, smiling and nodding.", "visual_elements": [ { "content": "At this moment, the camera cuts to the inside of the room, showing a person lying on a hospital bed.", "type": "camera", "weight": 3 }, { "content": "The woman on the hospital bed is wearing light-colored pajamas.", "type": "attribute", "weight": 2 }, { "content": "The woman lies on the bed with her head resting on the pillow and her hands beside her head.", "type": "attribute", "weight": 3 }, { "content": "The woman looks in the direction of the door, smiling and nodding.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the doorway, where the child in the gray sweater holds the door handle and steps back, gradually closing the door.", "visual_elements": [ { "content": "The camera cuts back to the doorway.", "type": "camera", "weight": 3 }, { "content": "The child in the gray sweater holds the door handle and steps back.", "type": "action", "weight": 3 }, { "content": "The child in the gray sweater gradually closes the door.", "type": "action", "weight": 3 } ] }, { "event": "Once again, the camera cuts to the person in bed, who remains in the same position. The bed is covered with a patterned quilt.", "visual_elements": [ { "content": "The camera cuts back to the person on the hospital bed.", "type": "action", "weight": 3 }, { "content": "The person on the hospital bed is still lying there, turning their head to look in the direction of the door.", "type": "attribute", "weight": 2 }, { "content": "The bed is covered with a quilt with floral patterns.", "type": "attribute", "weight": 1 } ] } ], "n_events": 5, "n_elements": 22 }, { "index": "TUNA_0331", "video_path": "VELOCITI/v_PyHK6QRniQ0_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a male figure appears on the screen wearing a red hat and a gray coat, holding a red bottle in his hand. In the background, a character with long hair and a black top hat stands in front of a blue refrigerator with its door open, revealing various bottles, cans, and food inside.\nThe character in the black top hat is rummaging through the refrigerator and tossing items inside backwards.\nThe scene then cuts to a side view of the black top hat character and the refrigerator, showing some orange drink bottles and other food items inside. 
The character in the black top hat grabs a slice of meat and starts eating it, then spits it out and casually tosses the meat backwards.\nThe camera pulls back to show both individuals continuing their activities in the kitchen with their backs turned to each other.\nThe scene shifts back to the male, who stuffs a slice of meat into his mouth. The background features the character in the black top hat and the open refrigerator door.\nSubsequently, the camera cuts back to the character in the black top hat and the interior of the refrigerator, where the character continues to search for food and opens a bucket containing yellow cheese. Finally, the scene returns to the male, who is still eating what's in his hand and licking his fingers. The refrigerator door remains open in the background.", "events": [ { "event": "At the beginning of the video, a male figure appears on the screen wearing a red hat and a gray coat, holding a red bottle in his hand. In the background, a character with long hair and a black top hat stands in front of a blue refrigerator with its door open, revealing various bottles, cans, and food inside.", "visual_elements": [ { "content": "At the beginning of the video, a male wearing a red hat and a gray coat appears on the screen.", "type": "camera", "weight": 3 }, { "content": "The man holds a red bottle in his hand.", "type": "attribute", "weight": 3 }, { "content": "In the background, a character with long hair and a black top hat stands in front of a blue refrigerator.", "type": "scene", "weight": 2 }, { "content": "The refrigerator door is open, revealing various bottles, cans, and food items inside.", "type": "attribute", "weight": 2 } ] }, { "event": "The character in the black top hat is rummaging through the refrigerator and tossing items inside backwards.", "visual_elements": [ { "content": "The character in the black top hat is searching for food in the refrigerator.", "type": "action", "weight": 3 }, { "content": "The character in the black top hat throws items inside the refrigerator backwards.", "type": "action", "weight": 3 } ] }, { "event": "The scene then cuts to a side view of the black top hat character and the refrigerator, showing some orange drink bottles and other food items inside. The character in the black top hat grabs a slice of meat and starts eating it, then spits it out and casually tosses the meat backwards.", "visual_elements": [ { "content": "Next, the camera cuts to the character in the black top hat.", "type": "camera", "weight": 3 }, { "content": "Inside the refrigerator, there are some orange drink bottles and other food items.", "type": "attribute", "weight": 1 }, { "content": "The character in the black top hat starts eating a slice of meat.", "type": "action", "weight": 3 }, { "content": "Then, the character in the black top hat spits out the food.", "type": "action", "weight": 3 }, { "content": "The character in the black top hat casually throws the meat backwards.", "type": "action", "weight": 3 } ] }, { "event": "The camera pulls back to show both individuals continuing their activities in the kitchen with their backs turned to each other.", "visual_elements": [ { "content": "The camera zooms out, showing two people continuing to busy themselves in the kitchen with their backs to each other.", "type": "camera", "weight": 3 } ] }, { "event": "The scene shifts back to the male, who stuffs a slice of meat into his mouth. 
The background features the character in the black top hat and the open refrigerator door.", "visual_elements": [ { "content": "The camera cuts back to the man in the red hat.", "type": "camera", "weight": 3 }, { "content": "The man in the red hat puts a slice of meat in his mouth.", "type": "action", "weight": 3 }, { "content": "The background shows the character in the black top hat and the open refrigerator door.", "type": "scene", "weight": 2 } ] }, { "event": "Subsequently, the camera cuts back to the character in the black top hat and the interior of the refrigerator, where the character continues to search for food and opens a bucket containing yellow cheese. Finally, the scene returns to the male, who is still eating what's in his hand and licking his fingers. The refrigerator door remains open in the background.", "visual_elements": [ { "content": "Subsequently, the camera cuts back to the character in the black top hat and inside the refrigerator.", "type": "camera", "weight": 3 }, { "content": "The character in the black top hat continues to search for food in the refrigerator.", "type": "action", "weight": 2 }, { "content": "The character in the black top hat opens a bucket containing yellow cheese.", "type": "action", "weight": 3 }, { "content": "Finally, the camera cuts back to the man in the red hat.", "type": "camera", "weight": 3 }, { "content": "The man in the red hat is still eating what's in his hand and licking his fingers.", "type": "action", "weight": 3 } ] } ], "n_events": 6, "n_elements": 20 }, { "index": "TUNA_0332", "video_path": "VELOCITI/v_Q0IHL6WGFY0_seg_5_15.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a man dressed in a dark suit standing at a doorway. The walls on both sides of the doorway are covered with graffiti that reads \"HA! HA! HA!\". The man in the dark suit walks forward and descends the steps. In the lower right corner of the screen, a man with green hair stands with his back to the camera. The green-haired man wears a white shirt and a dark vest, and he holds a gun in his hand, pointed at the man in the suit.\nThe camera cuts to a frontal view of the green-haired man as he lowers the gun and begins to speak. In the background, several computer screens can be seen, and the room is adorned with beige curtains.\nThe camera cuts back to the man in the suit, who is now seated on the steps with his hands on his knees, also beginning to speak. There are some bottles placed around. The camera zooms in, gradually closing in on the man in the suit.\nFinally, the camera returns to the green-haired man, who is sitting on the ground surrounded by various weapons and tools. The glass windows behind him offer a view of the tall buildings outside.", "events": [ { "event": "At the beginning of the video, the camera focuses on a man dressed in a dark suit standing at a doorway. The walls on both sides of the doorway are covered with graffiti that reads \"HA! HA! HA!\". The man in the dark suit walks forward and descends the steps. In the lower right corner of the screen, a man with green hair stands with his back to the camera. 
The green-haired man wears a white shirt and a dark vest, and he holds a gun in his hand, pointed at the man in the suit.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on a man dressed in a dark suit", "type": "camera", "weight": 3 }, { "content": "The man dressed in a dark suit stands at a doorway.", "type": "attribute", "weight": 3 }, { "content": "The walls on both sides of the doorway are covered with graffiti that reads \"HA! HA! HA!\" .", "type": "scene", "weight": 1 }, { "content": "The man in the dark suit walks forward and descends the steps.", "type": "action", "weight": 3 }, { "content": "In the lower right corner of the screen, a man with green hair stands with his back to the camera.", "type": "attribute", "weight": 2 }, { "content": "The green-haired man wears a white shirt and a dark vest.", "type": "attribute", "weight": 2 }, { "content": "The green-haired man holds a gun in his hand, pointed at the man in the suit.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to a frontal view of the green-haired man as he lowers the gun and begins to speak. In the background, several computer screens can be seen, and the room is adorned with beige curtains.", "visual_elements": [ { "content": "The camera cuts to a frontal view of the green-haired man.", "type": "camera", "weight": 3 }, { "content": "The green-haired man begins to speak as he lowers his gun.", "type": "attribute", "weight": 2 }, { "content": "In the background, several computer screens can be seen.", "type": "attribute", "weight": 2 }, { "content": "The room is adorned with beige curtains.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts back to the man in the suit, who is now seated on the steps with his hands on his knees, also beginning to speak. There are some bottles placed around. The camera zooms in, gradually closing in on the man in the suit.", "visual_elements": [ { "content": "The camera cuts back to the man in the suit.", "type": "camera", "weight": 3 }, { "content": "The man in the suit is now seated on the steps with his hands on his knees", "type": "attribute", "weight": 3 }, { "content": "The man in the suit begins to speak.", "type": "action", "weight": 3 }, { "content": "There are some bottles placed around.", "type": "attribute", "weight": 1 }, { "content": "The camera zooms in, gradually closing in on the man in the suit.", "type": "camera", "weight": 3 } ] }, { "event": "Finally, the camera returns to the green-haired man, who is sitting on the ground surrounded by various weapons and tools. The glass windows behind him offer a view of the tall buildings outside.", "visual_elements": [ { "content": "Finally, the camera returns to the green-haired man.", "type": "camera", "weight": 3 }, { "content": "The green-haired man sits on the ground.", "type": "attribute", "weight": 3 }, { "content": "The surroundings are filled with various weapons and tools.", "type": "scene", "weight": 1 }, { "content": "The glass windows behind the green-haired man offer a view of the tall buildings outside.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_0333", "video_path": "VELOCITI/v_Q0bjuz5YBLM_seg_20_30.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the screen is completely black. 
Gradually, the shot brightens, revealing a young man standing in front of a mirror. He is wearing a blue shirt under a black leather jacket. On the wall behind him, there hangs a crucifix, and the wallpaper has light-colored patterns on it. The man adjusts his clothes with both hands, takes a step to his right, zips up his jacket, and places his hands on his abdomen while looking at himself in the mirror.\nThe scene cuts to a dimly lit alleyway, with tall buildings towering on both sides and a wet, slippery ground reflecting a faint light. On the right side of the alley, there's a neon sign that reads \"HOTEL.\" A man riding a bicycle approaches the camera from the depths of the alley, while a few figures move in the background.", "events": [ { "event": "At the beginning of the video, the screen is completely black. Gradually, the shot brightens, revealing a young man standing in front of a mirror. He is wearing a blue shirt under a black leather jacket. On the wall behind him, there hangs a crucifix, and the wallpaper has light-colored patterns on it. The man adjusts his clothes with both hands, takes a step to his right, zips up his jacket, and places his hands on his abdomen while looking at himself in the mirror.", "visual_elements": [ { "content": "At the beginning of the video, the screen is completely black.", "type": "attribute", "weight": 1 }, { "content": "As the person blocking the camera moves out of the frame, the specific characters become visible on the screen.", "type": "attribute", "weight": 2 }, { "content": "A young man wearing a blue shirt underneath a black leather jacket stands in front of a mirror.", "type": "attribute", "weight": 3 }, { "content": "There hangs a crucifix on the wall behind the man.", "type": "scene", "weight": 1 }, { "content": "The walls are adorned with light-colored floral wallpaper.", "type": "attribute", "weight": 1 }, { "content": "The man in the black leather jacket adjusts his clothing with both hands.", "type": "action", "weight": 3 }, { "content": "The man in the black leather jacket takes a step to his right.", "type": "action", "weight": 3 }, { "content": "The man in the black leather jacket zips up his jacket.", "type": "action", "weight": 3 }, { "content": "The man in the black leather jacket places his hands on his abdomen while looking at himself in the mirror.", "type": "attribute", "weight": 3 } ] }, { "event": "The scene cuts to a dimly lit alleyway, with tall buildings towering on both sides and a wet, slippery ground reflecting a faint light. 
On the right side of the alley, there's a neon sign that reads \"HOTEL.\" A man riding a bicycle approaches the camera from the depths of the alley, while a few figures move in the background.", "visual_elements": [ { "content": "The scene cuts to a dimly lit alleyway.", "type": "camera", "weight": 3 }, { "content": "The alleyway is flanked by buildings on both sides.", "type": "attribute", "weight": 1 }, { "content": "The ground is wet and slippery, reflecting a faint light.", "type": "attribute", "weight": 1 }, { "content": "On the right side of the alley, there's a neon sign that reads \"HOTEL.\"", "type": "attribute", "weight": 2 }, { "content": "A man riding a bicycle approaches the camera from the depths of the alley.", "type": "action", "weight": 3 }, { "content": "A few figures move in the background.", "type": "scene", "weight": 2 }, { "content": "The light in the alleyway is dim.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0334", "video_path": "VELOCITI/v_Q0gx_D--iDw_seg_65_75.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the screen displays a steep rock wall with a backdrop of red rock formations. A climber dressed in pink clothing and another climber attired in a gray sleeveless shirt and shorts are secured to the same rope. The climber in pink swings their legs above the rope, while the climber in the gray sleeveless shirt swings below it.\nNext, the camera cuts down to show three climbers suspended on climbing ropes, all equipped with climbing gear and gripping the ropes in their hands. The one at the top is dressed in gray and is adjusting the rope, the middle climber wears a black vest and is spinning, and the one at the bottom is dressed in black and firmly gripping the rope.\nThe camera zooms in for a close-up of a female climber's face and hands, showing her tightly grasping the rope and taking a deep breath.\nSubsequently, the camera cuts to a close-up of another male climber, who is also gripping the rope tightly with a serious expression.\nThe camera then returns to the top two of the three climbers mentioned earlier. Both climbers are suspended on the rock wall, continuing to adjust their positions. As the camera pans, it focuses on the climber in gray clothing, who is gripping the rope firmly with a serious look on his face.", "events": [ { "event": "At the beginning of the video, the screen displays a steep rock wall with a backdrop of red rock formations. A climber dressed in pink clothing and another climber attired in a gray sleeveless shirt and shorts are secured to the same rope. 
The climber in pink swings their legs above the rope, while the climber in the gray sleeveless shirt swings below it.", "visual_elements": [ { "content": "At the beginning of the video, the screen displays a steep rock wall.", "type": "camera", "weight": 3 }, { "content": "The background is brown rocks.", "type": "scene", "weight": 1 }, { "content": "A climber is dressed in pink clothing.", "type": "attribute", "weight": 2 }, { "content": "Another climber is wearing a gray sleeveless top and shorts.", "type": "attribute", "weight": 2 }, { "content": "Both of them are secured to the same rope.", "type": "attribute", "weight": 2 }, { "content": "The climber in pink swings their legs above the rope.", "type": "action", "weight": 3 }, { "content": "The climber in the gray sleeveless shirt swings below the rope.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts down to show three climbers suspended on climbing ropes, all equipped with climbing gear and gripping the ropes in their hands. The one at the top is dressed in gray and is adjusting the rope, the middle climber wears a black vest and is spinning, and the one at the bottom is dressed in black and firmly gripping the rope.", "visual_elements": [ { "content": "Next, the camera cuts down to show three climbers suspended on climbing ropes.", "type": "camera", "weight": 3 }, { "content": "All three climbers wear climbing gear.", "type": "attribute", "weight": 2 }, { "content": "All three climbers grip the ropes in their hands.", "type": "attribute", "weight": 2 }, { "content": "The climber at the top is dressed in gray.", "type": "attribute", "weight": 2 }, { "content": "The climber in gray is adjusting the rope.", "type": "action", "weight": 3 }, { "content": "The middle climber wears a black vest.", "type": "attribute", "weight": 2 }, { "content": "The climber in the black vest is spinning.", "type": "action", "weight": 3 }, { "content": "The climber at the bottom is dressed in black.", "type": "attribute", "weight": 2 }, { "content": "The climber in black firmly grips the rope.", "type": "action", "weight": 3 } ] }, { "event": "The camera zooms in for a close-up of a female climber's face and hands, showing her tightly grasping the rope and taking a deep breath.", "visual_elements": [ { "content": "The camera zooms in for a close-up of a female climber's face and hands.", "type": "camera", "weight": 3 }, { "content": "The female climber tightly grasps the rope.", "type": "action", "weight": 3 }, { "content": "The female climber takes a deep breath.", "type": "action", "weight": 3 } ] }, { "event": "Subsequently, the camera cuts to a close-up of another male climber, who is also gripping the rope tightly with a serious expression.", "visual_elements": [ { "content": "Subsequently, the camera cuts to a close-up of another male climber.", "type": "camera", "weight": 3 }, { "content": "The male climber also grips the rope tightly.", "type": "action", "weight": 3 }, { "content": "The male climber has a serious expression.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera then returns to the top two of the three climbers mentioned earlier. Both climbers are suspended on the rock wall, continuing to adjust their positions. 
As the camera pans, it focuses on the climber in gray clothing, who is gripping the rope firmly with a serious look on his face.", "visual_elements": [ { "content": "The camera then returns to the top two of the three climbers mentioned earlier.", "type": "camera", "weight": 3 }, { "content": "Both climbers are suspended on the rock wall, continuing to adjust their positions.", "type": "action", "weight": 3 }, { "content": "As the camera pans, it focuses on the climber in gray clothing.", "type": "camera", "weight": 3 }, { "content": "The climber in gray grips the rope firmly.", "type": "action", "weight": 3 }, { "content": "The climber in gray has a serious expression.", "type": "attribute", "weight": 2 } ] } ], "n_events": 5, "n_elements": 27 }, { "index": "TUNA_0335", "video_path": "VELOCITI/v_Q0gx_D--iDw_seg_75_85.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera begins to focus on a middle-aged male climber, wearing a gray long-sleeved shirt with red and white stripes on the sleeves, a safety belt around his waist, and both hands tightly gripping a green climbing rope as he strives to maintain balance. The background is a steep rock wall, with the rocks showing a natural reddish-brown color.\nSuddenly, the male climber's left hand slips off the rope, causing the camera to shake violently. He looks down to his left in terror, his face twisted in pain, and his body starts to sway unsteadily.\nThe camera cuts to the two climbers below him; from this angle, one of his feet can also be seen hanging in the air. The two climbers below are suspended on the same climbing rope, swaying violently. They are both wearing professional climbing gear and climbing shoes, and are connected to each other by ropes, maintaining a certain distance.\nThe camera zooms out to reveal the full view of the climbing team. Three climbers are clinging to the towering rock wall, their bodies swaying wildly. The background is a vast wilderness, with distant mountains framed against the blue sky.\nThe camera cuts to a female climber in the team. She tightly grabs the climbing rope with her left hand and suddenly turns her head upwards, her expression revealing obvious fear and tension.\nThe camera pans to a fixed protection device on the rock wall, firmly wedged in a rock crevice, sliding down sharply with the pull of the climbing rope.\nThe camera again cuts to the female climber, suspended in mid-air with her legs slightly bent and her toes pointing towards the rock wall, her body swaying uncertainly. Shot from below, her body shakes violently, her face shows terror, and she shouts loudly to the people below.\nThe camera cuts to the three male climbers hanging on the rock wall. Their expressions vary: the first man lies flat, his face showing pain; the second man looks down to his left, both hands gripping the rope tightly; the third man is at the bottom of the team, with only one upturned leg visible in the scene.", "events": [ { "event": "The camera begins to focus on a middle-aged male climber, wearing a gray long-sleeved shirt with red and white stripes on the sleeves, a safety belt around his waist, and both hands tightly gripping a green climbing rope as he strives to maintain balance. 
The background is a steep rock wall, with the rocks showing a natural reddish-brown color.", "visual_elements": [ { "content": "The camera begins to focus on a middle-aged male climber.", "type": "camera", "weight": 3 }, { "content": "He is wearing a gray long-sleeved shirt with red and white stripes on the sleeves, and a safety belt is fastened around his waist.", "type": "attribute", "weight": 2 }, { "content": "He is pulling hard on a green climbing rope.", "type": "action", "weight": 3 }, { "content": "The background is a steep rock wall.", "type": "scene", "weight": 2 }, { "content": "The rocks show a natural reddish-brown color.", "type": "scene", "weight": 2 } ] }, { "event": "Suddenly, the male climber's left hand slips off the rope, causing the camera to shake violently. He looks down to his left in terror, his face twisted in pain, and his body starts to sway unsteadily.", "visual_elements": [ { "content": "The male climber's left hand slips off the rope abruptly.", "type": "action", "weight": 3 }, { "content": "The camera shakes, and the male climber looks down to his left.", "type": "camera", "weight": 3 }, { "content": "His face is twisted in pain, and his body sways.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the two climbers below him; from this angle, one of his feet can also be seen hanging in the air. The two climbers below are suspended on the same climbing rope, swaying violently. They are both wearing professional climbing gear and climbing shoes, and are connected to each other by ropes, maintaining a certain distance.", "visual_elements": [ { "content": "The camera cuts to the two climbers below the man.", "type": "camera", "weight": 3 }, { "content": "At the same time, the camera shows the feet of the male climber.", "type": "attribute", "weight": 1 }, { "content": "The two climbers below are suspended on the climbing rope, swaying violently.", "type": "action", "weight": 3 }, { "content": "They are both wearing climbing gear and climbing shoes.", "type": "attribute", "weight": 2 }, { "content": "They are connected to each other by ropes.", "type": "attribute", "weight": 2 }, { "content": "They maintain a certain distance from each other.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera zooms out to reveal the full view of the climbing team. Three climbers are clinging to the towering rock wall, their bodies swaying wildly. The background is a vast wilderness, with distant mountains framed against the blue sky.", "visual_elements": [ { "content": "The camera zooms out to reveal the scene of the whole climbing team clinging to the towering rock wall.", "type": "camera", "weight": 3 }, { "content": "Three climbers are clinging to the same climbing rope, their bodies swaying wildly.", "type": "action", "weight": 3 }, { "content": "The background is a vast wilderness and distant mountains, forming a splendid landscape.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a female climber in the team. 
She tightly grabs the climbing rope with her left hand and suddenly turns her head upwards, her expression revealing obvious fear and tension.", "visual_elements": [ { "content": "The camera cuts to a female climber in the team.", "type": "camera", "weight": 3 }, { "content": "She tightly grabs the climbing rope with her left hand on top and suddenly turns her head upwards.", "type": "action", "weight": 3 }, { "content": "She tightly grabs the rope, her expression revealing fear and tension.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans to a fixed protection device on the rock wall, firmly wedged in a rock crevice, sliding down sharply with the pull of the climbing rope.", "visual_elements": [ { "content": "The camera cuts to a fixed protection device on the rock wall.", "type": "camera", "weight": 3 }, { "content": "The protection device is firmly wedged in a rock crevice.", "type": "attribute", "weight": 2 }, { "content": "The protection device slides down sharply under the pull of the climbing rope.", "type": "action", "weight": 3 } ] }, { "event": "The camera again cuts to the female climber, suspended in mid-air with her legs slightly bent and her toes pointing towards the rock wall, her body swaying uncertainly. Shot from below, her body shakes violently, her face shows terror, and she shouts loudly to the people below.", "visual_elements": [ { "content": "The camera again cuts to the female climber.", "type": "camera", "weight": 3 }, { "content": "She is suspended in mid-air with her legs slightly bent and her toes pointing towards the rock wall.", "type": "action", "weight": 3 }, { "content": "Shot from below, her body shakes violently.", "type": "camera", "weight": 3 }, { "content": "Her face shows terror, and she shouts loudly to the people below.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the three male climbers hanging on the rock wall. Their expressions vary: the first man lies flat, his face showing pain; the second man looks down to his left, both hands gripping the rope tightly; the third man is at the bottom of the team, with only one upturned leg visible in the scene.", "visual_elements": [ { "content": "The camera again cuts to the three male climbers hanging on the rock wall.", "type": "camera", "weight": 3 }, { "content": "They have painful expressions on their faces, and their bodies are in various poses.", "type": "action", "weight": 3 }, { "content": "The first man among them lies flat, his face showing pain.", "type": "attribute", "weight": 2 }, { "content": "The second man looks down to his left, both hands gripping the rope tightly.", "type": "attribute", "weight": 2 }, { "content": "The third man is at the bottom of the scene, showing only one upturned leg.", "type": "attribute", "weight": 2 } ] } ], "n_events": 8, "n_elements": 32 }, { "index": "TUNA_0336", "video_path": "VELOCITI/v_QNCuoEO3jFY_seg_100_110.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a small boat with three people on it. In the middle is an adult man, turning his head to the right, wearing a light-colored long-sleeved shirt, and with curly long hair. The boy on the left side of the scene wears a white T-shirt with dark sleeves, pointing forward with his right index finger and slightly opening his mouth. 
The child held in the man's arms has its head slightly tilted backwards, with its eyes tightly closed. The background is a dense wood, and delicate ripples are dancing on the water surface. The man continues to turn his head downwards to the right, his eyes widening with a tense expression. Then, he turns his head back to face forward, his gaze firm and determined.\nThe camera cuts to the back view of the man, where his curly hair is glowing in the sunlight. The background is a patch of water and the woods along the shoreline.\nThe boat rapidly approaches the shore, and the camera cuts to the scene of the boat docking. The man jumps off the boat, wearing jeans and dark boots, and strides ashore while holding the child wrapped in a gray blanket in his arms. The child being held wears black shoes and is fully covered. The backdrop is a wood with a thick layer of fallen leaves covering the ground. Sunlight filters through the leaves, casting a dappled pattern of light and shadow on the ground.\nAfter the man runs ashore, the camera zooms out, capturing him as he runs deep into the woods and then disappears from the scene.\nThe scene cuts to the boat gradually approaching the shore. The boy on the boat turns back to pick up something, and then also stands up to disembark.", "events": [ { "event": "At the beginning of the video, the camera focuses on a small boat with three people on it. In the middle is an adult man, turning his head to the right, wearing a light-colored long-sleeved shirt, and with curly long hair. The boy on the left side of the scene wears a white T-shirt with dark sleeves, pointing forward with his right index finger and slightly opening his mouth. The child held in the man's arms has its head slightly tilted backwards, with its eyes tightly closed. The background is a dense wood, and delicate ripples are dancing on the water surface. The man continues to turn his head downwards to the right, his eyes widening with a tense expression. Then, he turns his head back to face forward, his gaze firm and determined.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on a small boat.", "type": "camera", "weight": 3 }, { "content": "There are three people in the boat.", "type": "action", "weight": 3 }, { "content": "In the middle is an adult man, turning his head to look to the right.", "type": "attribute", "weight": 2 }, { "content": "He is wearing a light-colored long-sleeved shirt and has curly long hair.", "type": "attribute", "weight": 2 }, { "content": "The boy on the left side of the scene wears a white T-shirt with dark sleeves.", "type": "action", "weight": 3 }, { "content": "The boy points forward with his right index finger and slightly opens his mouth.", "type": "attribute", "weight": 3 }, { "content": "The child held in the man's arms has its eyes tightly closed.", "type": "attribute", "weight": 2 }, { "content": "The background is a dense wood.", "type": "scene", "weight": 2 }, { "content": "The water surface ripples.", "type": "scene", "weight": 2 }, { "content": "The man continues to turn his head downwards to his right, his eyes widening.", "type": "action", "weight": 2 }, { "content": "Then he turns his head back to face forward, looking ahead with determination.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the back view of the man, where his curly hair is glowing in the sunlight. 
The background is a patch of water and the woods along the shoreline.", "visual_elements": [ { "content": "The camera cuts to the back view of the man.", "type": "camera", "weight": 3 }, { "content": "His curly hair is glowing in the sunlight.", "type": "attribute", "weight": 2 }, { "content": "The background is a patch of water and the woods along the shoreline.", "type": "scene", "weight": 3 } ] }, { "event": "The boat rapidly approaches the shore, and the camera cuts to the scene of the boat docking. The man jumps off the boat, wearing jeans and dark boots, and strides ashore while holding the child wrapped in a gray blanket in his arms. The child being held wears black shoes and is fully covered. The backdrop is a wood with a thick layer of fallen leaves covering the ground. Sunlight filters through the leaves, casting a dappled pattern of light and shadow on the ground.", "visual_elements": [ { "content": "The boat rapidly approaches the shore.", "type": "action", "weight": 3 }, { "content": "The camera cuts to the scene of the boat docking.", "type": "camera", "weight": 3 }, { "content": "The man jumps off the boat.", "type": "action", "weight": 3 }, { "content": "He is wearing jeans and dark boots.", "type": "attribute", "weight": 2 }, { "content": "He strides ashore while holding the child wrapped in a gray blanket in his arms.", "type": "action", "weight": 3 }, { "content": "The child being held is wearing black shoes and is fully covered.", "type": "attribute", "weight": 2 }, { "content": "The backdrop is a wood with a thick layer of fallen leaves covering the ground. Sunlight filters through the leaves, casting a dappled pattern of light and shadow.", "type": "scene", "weight": 2 } ] }, { "event": "After the man runs ashore, the camera zooms out, capturing him as he runs deep into the woods and then disappears from the scene.", "visual_elements": [ { "content": "After the man runs ashore, the camera zooms out.", "type": "camera", "weight": 3 }, { "content": "The man runs deep into the woods and then disappears from the shot.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts to the boat gradually approaching the shore. The boy on the boat turns back to pick up something, and then also stands up to disembark.", "visual_elements": [ { "content": "The scene cuts to the boat gradually approaching the shore.", "type": "camera", "weight": 3 }, { "content": "The boy on the boat turns back to pick up something, and then also stands up to disembark.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 25 }, { "index": "TUNA_0337", "video_path": "VELOCITI/v_QNCuoEO3jFY_seg_120_130.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a large white truck. A close-up shot reveals the truck's massive front grille and round headlights, exuding a sense of stability and power. The truck is speeding along a rural highway, with an expansive field in the background.\nThe camera cuts to the rear of the truck, showing its long trailer and bright taillights. Then, the shot pans to the left, and a man enters the scene from the left side. The man's hair is disheveled, and he is wearing a white shirt, cradling a person wrapped in cloth in his arms. 
The backdrop is a straight highway, with vast fields on both sides.\nThe camera pans to the entrance of a building, where a man dressed in a blue nurse's uniform is slowly pushing an elderly person in a wheelchair out of the building. The exterior wall of the building is made of red bricks, with a rolled-up hose and a trash can nearby. The elderly person is wearing a gray sweater and beige pants, holding a newspaper in his hand.\nThe camera cuts to a scene where the man is riding a motorcycle rapidly across a grassy field, still cradling the person wrapped in cloth in his arms. He speeds through the grass and rides onto the cemented ground, with several buildings and some parked cars serving as the backdrop.\nThe scene pans once again to the front of the red-brick building, where the man dressed in a blue nurse's uniform and the elderly person in the wheelchair slowly turn their heads and look to their left together.\nThe camera cuts to the man riding the motorcycle, who is holding the person in his arms and preparing to jump off the motorcycle.", "events": [ { "event": "At the beginning of the video, the camera focuses on a large white truck. A close-up shot reveals the truck's massive front grille and round headlights, exuding a sense of stability and power. The truck is speeding along a rural highway, with an expansive field in the background.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on a large white truck.", "type": "camera", "weight": 3 }, { "content": "A close-up shot reveals the truck's massive front grille and round headlights.", "type": "attribute", "weight": 2 }, { "content": "The truck is speeding along a rural highway.", "type": "action", "weight": 1 }, { "content": "The background is an expansive field and distant woods.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to the rear of the truck, showing its long trailer and bright taillights. Then, the shot pans to the left, and a man enters the scene from the left side. The man's hair is disheveled, and he is wearing a white shirt, cradling a person wrapped in cloth in his arms. The backdrop is a straight highway, with vast fields on both sides.", "visual_elements": [ { "content": "The camera cuts to the rear of the truck, showing its long trailer and bright taillights.", "type": "camera", "weight": 3 }, { "content": "The shot pans to the left.", "type": "camera", "weight": 3 }, { "content": "A man enters the scene from the left side.", "type": "action", "weight": 3 }, { "content": "The man's hair is disheveled.", "type": "attribute", "weight": 2 }, { "content": "The man is wearing a white shirt, cradling a person wrapped in cloth in his arms.", "type": "attribute", "weight": 2 }, { "content": "The backdrop is a straight highway, with vast fields on both sides and distant woods.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans to the entrance of a building, where a man dressed in a blue nurse's uniform is slowly pushing an elderly person in a wheelchair out of the building. The exterior wall of the building is made of red bricks, with a rolled-up hose and a trash can nearby. 
The elderly person is wearing a gray sweater and beige pants, holding a newspaper in his hand.", "visual_elements": [ { "content": "The camera pans to the entrance of a building.", "type": "camera", "weight": 3 }, { "content": "A man dressed in a blue nurse's uniform is slowly pushing an elderly person in a wheelchair out of the building.", "type": "action", "weight": 3 }, { "content": "The exterior wall of the building is made of red bricks, with a rolled-up hose and a trash can nearby.", "type": "scene", "weight": 2 }, { "content": "The elderly person is wearing a gray sweater and beige pants, holding a newspaper in hand.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a scene where the man is riding a motorcycle rapidly across a grassy field, still cradling the person wrapped in cloth in his arms. He speeds through the grass and rides onto the cemented ground, with several buildings and some parked cars serving as the backdrop.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The man is riding a motorcycle rapidly across a grassy field.", "type": "action", "weight": 3 }, { "content": "The man is still cradling the person wrapped in cloth in his arms.", "type": "attribute", "weight": 2 }, { "content": "The man speeds through the grass and rides onto the cemented ground.", "type": "action", "weight": 3 }, { "content": "The background shows several buildings and some parked cars.", "type": "scene", "weight": 2 } ] }, { "event": "The scene pans once again to the front of the red-brick building, where the man dressed in a blue nurse's uniform and the elderly person in the wheelchair slowly turn their heads and look to their left together.", "visual_elements": [ { "content": "The scene pans once again to the front of the red-brick building.", "type": "camera", "weight": 3 }, { "content": "The man dressed in a blue nurse's uniform is pushing the elderly person in a wheelchair, and the two slowly turn their heads and look to their left together.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the man riding the motorcycle, who is holding the person in his arms and preparing to jump off the motorcycle.", "visual_elements": [ { "content": "The camera cuts to the man riding the motorcycle.", "type": "camera", "weight": 3 }, { "content": "The man is holding the person in his arms and preparing to jump off the motorcycle.", "type": "action", "weight": 3 } ] } ], "n_events": 6, "n_elements": 23 }, { "index": "TUNA_0338", "video_path": "VELOCITI/v_QNCuoEO3jFY_seg_30_40.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a woodland where sunlight streams down to the ground through the leaves. A man, dressed in a white shirt and blue jeans, cradles a child dressed in dark clothing in his arms. As the camera zooms in, the man, carrying the child, rapidly runs forward. Behind him, another boy, wearing a white and gray long-sleeved T-shirt and camouflage pants, is chasing after him. In the background, the distant water surface and trees lining both sides of the road can be seen. Both the man and the boy chasing behind him gradually exit the frame from the left side.\nThe camera cuts to a wider angle as they run from the woods, arriving at an open riverbank. 
The camera pans to the left, revealing a small boat on the river near the shore. The man quickly runs towards the boat. In the background, the expanse of the river is visible. The man jumps onto the boat while holding the child, then he sits down facing the camera. The boy, who follows closely behind, also jumps onto the boat and walks to the back of the man, facing away from the camera. The boat begins to rock due to their movements.", "events": [ { "event": "At the beginning of the video, the camera focuses on a woodland where sunlight streams down to the ground through the leaves. A man, dressed in a white shirt and blue jeans, cradles a child dressed in dark clothing in his arms. As the camera zooms in, the man, carrying the child, rapidly runs forward. Behind him, another boy, wearing a white and gray long-sleeved T-shirt and camouflage pants, is chasing after him. In the background, the distant water surface and trees lining both sides of the road can be seen. Both the man and the boy chasing behind him gradually exit the frame from the left side.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on a woodland.", "type": "camera", "weight": 3 }, { "content": "The sunlight streams down to the ground through the leaves in the wood.", "type": "scene", "weight": 2 }, { "content": "A man is wearing a white shirt and blue jeans.", "type": "attribute", "weight": 2 }, { "content": "The man cradles a child dressed in dark clothing in his arms.", "type": "attribute", "weight": 3 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The man, carrying the child, rapidly runs forward.", "type": "action", "weight": 3 }, { "content": "Behind him, another boy, wearing a white and gray long-sleeved T-shirt and camouflage pants, is chasing after him.", "type": "action", "weight": 3 }, { "content": "In the background, the distant water surface and trees lining both sides of the road can be seen.", "type": "scene", "weight": 2 }, { "content": "Both the man and the boy chasing behind him gradually exit the scene from the left side.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a wider angle as they run from the woods, arriving at an open riverbank. The camera pans to the left, revealing a small boat on the river near the shore. The man quickly runs towards the boat. In the background, the expanse of the river is visible. The man jumps onto the boat while holding the child, then he sits down facing the camera. The boy, who follows closely behind, also jumps onto the boat and walks to the back of the man, facing away from the camera. 
The boat begins to rock due to their movements.", "visual_elements": [ { "content": "The camera cuts to a wider angle.", "type": "camera", "weight": 3 }, { "content": "They run from the woods, arriving at an open riverbank.", "type": "action", "weight": 2 }, { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "There is a small boat on the river near the shore.", "type": "attribute", "weight": 2 }, { "content": "The man quickly runs towards the boat.", "type": "action", "weight": 3 }, { "content": "In the background, the expanse of the river is visible.", "type": "scene", "weight": 2 }, { "content": "The man jumps onto the boat while holding the child, then he sits down facing the camera.", "type": "action", "weight": 3 }, { "content": "The boy, who follows closely behind, also jumps onto the boat and walks to the back of the man, facing away from the camera.", "type": "action", "weight": 3 }, { "content": "The boat begins to rock due to their movements.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 18 }, { "index": "TUNA_0339", "video_path": "VELOCITI/v_QaNag38SNno_seg_100_110.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a man and a woman appear in the scene. The man stands on the left side of the scene, wearing a light yellow short-sleeved shirt with his arms spread out in front of his chest. The woman stands on the right side of the scene, dressed in a purple short-sleeved top and a floral skirt, facing away from the camera with her body slightly bent. The backdrop is a lake at night, with the trees along the shore and distant lights faintly visible. The woman gradually bends down.\nThe camera cuts to another angle, where the woman is now facing the camera while the man is facing away from it. They are standing next to a black car with its trunk open, and a large blue bag lies on the ground. Both of them bend down and lift the blue bag together.", "events": [ { "event": "At the beginning of the video, a man and a woman appear in the scene. The man stands on the left side of the scene, wearing a light yellow short-sleeved shirt with his arms spread out in front of his chest. The woman stands on the right side of the scene, dressed in a purple short-sleeved top and a floral skirt, facing away from the camera with her body slightly bent. The backdrop is a lake at night, with the trees along the shore and distant lights faintly visible. 
The woman gradually bends down.", "visual_elements": [ { "content": "At the beginning of the video, a man and a woman are in the scene.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a light yellow short-sleeved shirt.", "type": "attribute", "weight": 2 }, { "content": "The man stands on the left side of the scene, with his arms spread out in front of his chest.", "type": "action", "weight": 2 }, { "content": "The woman is wearing a purple short-sleeved top and a floral skirt.", "type": "attribute", "weight": 1 }, { "content": "The woman stands on the right side of the scene, facing away from the camera with her body slightly bent.", "type": "action", "weight": 3 }, { "content": "The backdrop is a lake at night.", "type": "scene", "weight": 2 }, { "content": "There are trees along the shore and lights in the distance.", "type": "scene", "weight": 1 }, { "content": "The woman gradually bends down.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to another angle, where the woman is now facing the camera while the man is facing away from it. They are standing next to a black car with its trunk open, and a large blue bag lies on the ground. Both of them bend down and lift the blue bag together.", "visual_elements": [ { "content": "The camera cuts to another angle, where the woman is facing the camera while the man is facing away from it.", "type": "camera", "weight": 3 }, { "content": "They are next to a black car.", "type": "scene", "weight": 2 }, { "content": "The trunk of the car is open.", "type": "attribute", "weight": 2 }, { "content": "A large blue bag is on the ground.", "type": "attribute", "weight": 2 }, { "content": "Both of them bend down and lift the blue bag together.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0340", "video_path": "VELOCITI/v_QiVfqVQ5t9A_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an enclosed space. The wall directly opposite is a square metal plate, surrounded by metal fencing, with light shining through the gaps in it.\nAs the metal plate in front of the camera retreats and then slowly descends, several figures gradually appear in the scene. A middle-aged man dressed in a dark coat leans into the space. Behind and to the left of the man stands a girl wearing a dark coat, while in the background, a young man sits further back. The middle-aged man holds a small tool resembling a steel wire in his left hand, with his left arm resting on the edge of the metal space, and he looks down intently at a spot below.\nThe camera cuts to a shiny metal strip, with a series of inverted numbers \"582 434 865\" engraved on it. The camera moves upwards to reveal another metal strip, which has another series of numbers \"149 419 568\" engraved on it.\nThe camera returns to the middle-aged man, who raises the glasses in his hand and places them in front of his eyes. He gently moves his eyelids aside and his lips slightly tremble as he begins to speak.", "events": [ { "event": "At the beginning of the video, the camera focuses on an enclosed space. 
The wall directly opposite is a square metal plate, surrounded by a metal fence, with light shining through the gaps between them.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on an enclosed space.", "type": "camera", "weight": 3 }, { "content": "The wall directly opposite the shot is a square metal plate.", "type": "camera", "weight": 2 }, { "content": "The other sides are surrounded by a metal fence.", "type": "attribute", "weight": 2 }, { "content": "Light is shining through the gaps in the fence.", "type": "scene", "weight": 2 } ] }, { "event": "As the metal plate in front of the camera retreats and then slowly descends, several figures gradually appear in the scene. A middle-aged man dressed in a dark coat leans into the space. Behind and to the left of the man stands a girl wearing a dark coat, while in the background, a young man sits further back. The middle-aged man holds a small tool resembling a steel wire in his left hand, with his left arm resting on the edge of the metal space, and he looks down intently at a spot below.", "visual_elements": [ { "content": "The metal plate in front of the shot retreats and then slowly descends.", "type": "action", "weight": 3 }, { "content": "Several figures gradually appear in the scene.", "type": "attribute", "weight": 3 }, { "content": "A middle-aged man dressed in a dark coat leans into the space.", "type": "action", "weight": 3 }, { "content": "Behind and to the left of the man stands a girl wearing a dark coat.", "type": "scene", "weight": 2 }, { "content": "A young man sits further back in the background.", "type": "scene", "weight": 2 }, { "content": "The middle-aged man holds a small tool resembling a steel wire in his left hand.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged man's left arm is resting on the edge of the metal space.", "type": "action", "weight": 3 }, { "content": "He looks down below the metal space.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a shiny metal strip, with a series of inverted numbers \"582 434 865\" engraved on it. The camera moves upwards to reveal another metal strip, which has another series of numbers \"149 419 568\" engraved on it.", "visual_elements": [ { "content": "The camera cuts to a shiny metal strip.", "type": "camera", "weight": 3 }, { "content": "A series of inverted numbers \"582 434 865\" are engraved on it.", "type": "scene", "weight": 3 }, { "content": "The camera moves upwards to reveal another metal strip.", "type": "camera", "weight": 3 }, { "content": "There is a series of engraved numbers \"149 419 568\" on it.", "type": "scene", "weight": 3 } ] }, { "event": "The camera returns to the middle-aged man, who raises the glasses in his hand and places them in front of his eyes. 
He gently moves his eyelids aside and his lips slightly tremble as he begins to speak.", "visual_elements": [ { "content": "The camera returns to the middle-aged man.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man raises the glasses in his hand and places them in front of his eyes.", "type": "action", "weight": 3 }, { "content": "He begins to speak.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0341", "video_path": "VELOCITI/v_RLUhwGLJhsI_seg_90_100.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the scene, the camera focuses on a firefighter. He is bent over, dealing with an emergency situation on site. The surroundings are dimly lit, with sparks flying and smoke billowing.\nThe camera cuts to another firefighter. He is wearing full firefighting gear, and his mouth is widely open. Next to him, another firefighter is holding a fire extinguisher, spraying water towards the flames.\nThe camera cuts again to show the firefighter in full gear approaching the one who is kneeling and bending over to handle the fire scene. The firefighter in full gear also kneels down to assist in dealing with the fire. More firefighters walk towards the two firefighters on the ground. In the background, sofas and sculptural decorations can be seen.\nThe camera cuts to the exterior of the building, where several firefighters rush out of the revolving door, their faces showing signs of exhaustion and tension. The front end of a red car is visible on the right side of the scene.\nThe camera pans to a man in a gray shirt and a woman in a firefighter's jacket, who are standing beside a fire truck, taking drinks of water. In the background, some busy firefighters and police officers can be seen. Suddenly, the man in the gray shirt glances to the right of the scene. The camera follows his movement to the right, and the man in the gray shirt walks to the back of the fire truck with his paper cup.", "events": [ { "event": "At the beginning of the scene, the camera focuses on a firefighter. He is bent over, dealing with an emergency situation on site. The surroundings are dimly lit, with sparks flying and smoke billowing.", "visual_elements": [ { "content": "At the beginning of the scene, the camera focuses on a firefighter.", "type": "camera", "weight": 3 }, { "content": "The firefighter is bent over, dealing with an emergency situation on site.", "type": "action", "weight": 2 }, { "content": "The surroundings are dimly lit, with sparks flying and smoke billowing.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to another firefighter. He is wearing full firefighting gear, and his mouth is widely open. Next to him, another firefighter is holding a fire extinguisher, spraying water towards the flames.", "visual_elements": [ { "content": "The camera cuts to another firefighter.", "type": "camera", "weight": 3 }, { "content": "He is wearing full firefighting gear, and his mouth is widely open as he is shouting.", "type": "action", "weight": 3 }, { "content": "Next to him, another firefighter is holding a fire extinguisher, spraying water towards the flames.", "type": "action", "weight": 1 } ] }, { "event": "The camera cuts again to show the firefighter in full gear approaching the one who is kneeling and bending over to handle the fire scene. 
The firefighter in full gear also kneels down to assist in dealing with the fire. More firefighters walk towards the two firefighters on the ground. In the background, sofas and sculptural decorations can be seen.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The firefighter in full gear approaches the one who is kneeling and bending over to handle the fire scene.", "type": "action", "weight": 3 }, { "content": "The firefighter in full gear also kneels down to assist in dealing with the fire.", "type": "action", "weight": 3 }, { "content": "More firefighters walk towards the two firefighters on the ground.", "type": "action", "weight": 2 }, { "content": "In the background, sofas and sculptural decorations can be seen.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts to the exterior of the building, where several firefighters rush out of the revolving door, their faces showing signs of exhaustion and tension. The front end of a red car is visible on the right side of the scene.", "visual_elements": [ { "content": "The camera cuts to the exterior of the building.", "type": "camera", "weight": 3 }, { "content": "Several firefighters rush out of the revolving door.", "type": "action", "weight": 3 }, { "content": "The front end of a red car is visible on the right side of the scene.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans to a man in a gray shirt and a woman in a firefighter's jacket, who are standing beside a fire truck, taking drinks of water. In the background, some busy firefighters and police officers can be seen. Suddenly, the man in the gray shirt glances to the right of the scene. The camera follows his movement to the right, and the man in the gray shirt walks to the back of the fire truck with his paper cup.", "visual_elements": [ { "content": "The camera pans to a man in a gray shirt and a woman in a firefighter's jacket.", "type": "camera", "weight": 3 }, { "content": "They are standing beside a fire truck, taking drinks of water.", "type": "action", "weight": 2 }, { "content": "In the background, some busy firefighters and police officers can be seen.", "type": "scene", "weight": 2 }, { "content": "Suddenly, the man in the gray shirt glances to the right of the scene.", "type": "action", "weight": 3 }, { "content": "The camera pans to the right", "type": "camera", "weight": 3 }, { "content": "The man in the gray shirt walks to the back of the fire truck with his paper cup.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 20 }, { "index": "TUNA_0342", "video_path": "VELOCITI/v_ROeFmcvZRf8_seg_110_120.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a blonde-haired woman. She wears a loose white top and sits on the carpet, her right hand supporting the ground as she lifts her hand to cover her head with a distressed expression.\nThe camera pans to a curly-haired woman who holds a telephone receiver in her right hand. In the background, a lamp and a painting on the wall are visible. The curly-haired woman hangs up the phone, her expression turning fierce as she quickly moves forward.\nThe camera cuts to a black man, who also hangs up his phone.\nThe camera returns to the blonde woman in the white top. 
She stands up from the ground, supporting herself with her left hand on the wall as she walks towards the doorway of the room. The camera retreats following her movements.\nThe camera cuts. As the blonde woman reaches the doorway, the curly-haired woman suddenly appears, grabbing the blonde woman from behind and slamming her into the wall on the left side of the doorway.\nThe camera cuts to the hallway, where the curly-haired woman grips the blonde woman and throws her to the ground. The camera moves with the curly-haired woman's actions.\nThe camera cuts once again to the black man in the office. He holds a mobile phone in his left hand, tensely making a call. Meanwhile, he quickly turns around and urgently presses the elevator button with his right hand.", "events": [ { "event": "At the beginning of the video, the camera focuses on a blonde-haired woman. She wears a loose white top and sits on the carpet, her right hand supporting the ground as she lifts her hand to cover her head with a distressed expression.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on a blonde-haired woman.", "type": "camera", "weight": 3 }, { "content": "The blonde-haired woman wears a loose white top.", "type": "attribute", "weight": 2 }, { "content": "The blonde-haired woman sits on the carpet, her right hand supporting the ground.", "type": "action", "weight": 2 }, { "content": "The blonde-haired woman lifts her hand to cover her head with a distressed expression.", "type": "action", "weight": 2 } ] }, { "event": "The camera pans to a curly-haired woman who holds a telephone receiver in her right hand. In the background, a lamp and a painting on the wall are visible. The curly-haired woman hangs up the phone, her expression turning fierce as she quickly moves forward.", "visual_elements": [ { "content": "The camera pans to a curly-haired woman who holds a telephone receiver in her right hand.", "type": "camera", "weight": 3 }, { "content": "In the background, a lamp and a painting on the wall are visible.", "type": "scene", "weight": 1 }, { "content": "The curly-haired woman hangs up the phone, her expression turning fierce as she quickly moves forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a black man, who also hangs up his phone.", "visual_elements": [ { "content": "The camera cuts to a black man, who also hangs up his phone.", "type": "camera", "weight": 3 } ] }, { "event": "The camera returns to the blonde woman in the white top. She stands up from the ground, supporting herself with her left hand on the wall as she walks towards the doorway of the room. The camera retreats following her movements.", "visual_elements": [ { "content": "The camera returns to the blonde woman in the white top.", "type": "camera", "weight": 3 }, { "content": "The blonde-haired woman stands up from the ground.", "type": "action", "weight": 3 }, { "content": "The blonde-haired woman walks towards the doorway of the room, supporting herself with her left hand on the wall.", "type": "action", "weight": 3 }, { "content": "The camera retreats following her movements.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts. 
As the blonde woman reaches the doorway, the curly-haired woman suddenly appears, grabbing the blonde woman from behind and slamming her into the wall on the left side of the doorway.", "visual_elements": [ { "content": "The camera cuts", "type": "scene", "weight": 1 }, { "content": "As the blonde woman reaches the doorway, the curly-haired woman suddenly appears.", "type": "action", "weight": 3 }, { "content": "The curly-haired woman grabs the blonde woman from behind and slams her into the wall on the left side of the doorway.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the hallway, where the curly-haired woman grips the blonde woman and throws her to the ground. The camera moves with the curly-haired woman's actions.", "visual_elements": [ { "content": "The camera cuts to the hallway.", "type": "camera", "weight": 3 }, { "content": "The curly-haired woman grips the blonde woman and throws her to the ground.", "type": "action", "weight": 3 }, { "content": "The camera moves with the curly-haired woman's actions.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts once again to the black man in the office. He holds a mobile phone in his left hand, tensely making a call. Meanwhile, he quickly turns around and urgently presses the elevator button with his right hand.", "visual_elements": [ { "content": "The camera cuts once again to the black man.", "type": "camera", "weight": 3 }, { "content": "He holds a mobile phone in his left hand, making a call with a tense facial expression.", "type": "action", "weight": 2 }, { "content": "The black man turns around and presses the elevator button with his right hand.", "type": "action", "weight": 3 } ] } ], "n_events": 7, "n_elements": 21 }, { "index": "TUNA_0343", "video_path": "VELOCITI/v_ROeFmcvZRf8_seg_55_65.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a room where two women are engaged in a fierce struggle, with a lamp and a painting hanging on the wall visible in the background.\nThe woman on the left has blonde hair and wears a white top; the woman on the right has red curly hair and wears a white top with a light-colored vest. The blonde woman suddenly pushes the red-haired woman forcefully to one side, and the red-haired woman is immediately knocked to the ground.\nThe camera cuts to a close-up shot of the red-haired woman lying on the ground, sideways on the carpet, with a painful expression on her face.\nThe camera pans to the blonde woman standing over the red-haired woman. She has an angry expression and her movements are swift and forceful.\nThe camera cuts back to the red-haired woman, who is trying to protect herself with her arms. The blonde woman continues to exert pressure on her with her hands and legs, her actions resolute and forceful.\nThe camera cuts to a close-up of the blonde woman's face, showing anger and determination.\nThe camera returns to the red-haired woman, who is struggling on the ground with a painful expression, trying to break free from the restraint. The blonde woman forcefully holds down her arms, while the red-haired woman kicks at the blonde woman to push her away.\nThe camera cuts again, showing the blonde woman being kicked and stumbling backwards into the wall behind her, with a painful expression. 
The camera then cuts to the red-haired woman, who slowly sits up from the ground, her gaze fixed on the opposite side. The furniture and decorations in the room are visible in the background.", "events": [ { "event": "At the beginning of the video, the camera focuses on a room where two women are engaged in a fierce struggle, with a lamp and a painting hanging on the wall visible in the background.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on a room.", "type": "camera", "weight": 3 }, { "content": "A lamp and a painting hanging on the wall are visible in the background.", "type": "scene", "weight": 2 }, { "content": "Two women are grappling in the scene.", "type": "action", "weight": 2 } ] }, { "event": "The woman on the left has blonde hair and wears a white top; the woman on the right has red curly hair and wears a white top with a light-colored vest. The blonde woman suddenly pushes the red-haired woman forcefully to one side, and the red-haired woman is immediately knocked to the ground.", "visual_elements": [ { "content": "The woman on the left has blonde hair and wears a white top.", "type": "attribute", "weight": 2 }, { "content": "The woman on the right has red curly hair and wears a white top with a light-colored vest.", "type": "attribute", "weight": 2 }, { "content": "The blonde woman suddenly pushes the red-haired woman forcefully to one side.", "type": "action", "weight": 3 }, { "content": "The red-haired woman is knocked down to the ground.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up shot of the red-haired woman lying on the ground, sideways on the carpet, with a painful expression on her face.", "visual_elements": [ { "content": "The camera cuts to a close-up shot of the red-haired woman lying on the ground.", "type": "camera", "weight": 2 }, { "content": "The red-haired woman lies on the ground, sideways on the carpet, with a painful expression on her face.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera pans to the blonde woman standing over the red-haired woman. She has an angry expression and her movements are swift and forceful.", "visual_elements": [ { "content": "The camera pans to the blonde woman.", "type": "camera", "weight": 3 }, { "content": "The blonde woman has an angry expression and her movements are swift and forceful.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts back to the red-haired woman, who is trying to protect herself with her arms. The blonde woman continues to exert pressure on her with her hands and legs, her actions resolute and forceful.", "visual_elements": [ { "content": "The camera cuts back to the red-haired woman.", "type": "camera", "weight": 3 }, { "content": "The red-haired woman is trying to protect herself with her arms.", "type": "action", "weight": 3 }, { "content": "The blonde woman continues to exert pressure against the red-haired woman with her hands and legs.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up of the blonde woman's face, showing anger and determination.", "visual_elements": [ { "content": "The camera cuts to a close-up of the blonde woman's face.", "type": "camera", "weight": 3 }, { "content": "Her face shows anger and determination.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera returns to the red-haired woman, who is struggling on the ground with a painful expression, trying to break free from the restraint. 
The blonde woman forcefully holds down her arms, while the red-haired woman kicks at the blonde woman to push her away.", "visual_elements": [ { "content": "The camera returns to the red-haired woman.", "type": "camera", "weight": 3 }, { "content": "The red-haired woman is struggling on the ground with a painful expression.", "type": "action", "weight": 3 }, { "content": "The blonde woman forcefully holds down the red-haired woman's arms.", "type": "action", "weight": 3 }, { "content": "The red-haired woman kicks at the blonde woman to push her away.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again, showing the blonde woman being kicked and stumbling backwards into the wall behind her, with a painful expression. The camera then cuts to the red-haired woman, who slowly sits up from the ground, her gaze fixed on the opposite side. The furniture and decorations in the room are visible in the background.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "The blonde woman is kicked and stumbles backwards into the wall behind her, with a painful expression.", "type": "action", "weight": 3 }, { "content": "The camera cuts to the red-haired woman.", "type": "camera", "weight": 3 }, { "content": "The red-haired woman slowly sits up from the ground, her gaze fixed on the opposite side.", "type": "action", "weight": 3 }, { "content": "The furniture and decorations in the room are visible in the background.", "type": "scene", "weight": 1 } ] } ], "n_events": 8, "n_elements": 25 }, { "index": "TUNA_0344", "video_path": "VELOCITI/v_RWYM4Npp9rI_seg_100_110.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the inside of a car. There are four people inside. Sitting on the left side of the front row is an adult woman wearing a light-colored coat. In the back row, there are two girls and a boy. The boy sits on the left side of the back row by the window, and the girl on the right side by the window is older than the girl in the middle. The seats in the car are red. The adult woman opens her right car door and gets out of the car. After getting out, she turns around and closes the door. Both the boy in the back row and the girl in the middle adjust their sitting positions. The older girl on the right side of the back row leans forward and extends her left hand towards the front row.\nThe camera cuts to a night street scene, where dim lighting covers the street and only a few street lamps cast a faint glow. A woman wearing a trench coat runs forward with her back to the camera, heading towards a phone booth on the side of the street. Upon reaching the phone booth, the woman begins to dial a phone call.\nThe camera cuts again to a scene in a laboratory or medical setting. A man dressed in a lab coat stands in the laboratory, holding a syringe in his hand. Beside him are two persons wearing work uniforms, each holding different metallic tools. In the background, there are metal cabinets filled with laboratory equipment.", "events": [ { "event": "At the beginning of the video, the camera focuses on the inside of a car. There are four people inside. Sitting on the left side of the front row is an adult woman wearing a light-colored coat. In the back row, there are two girls and a boy. 
The boy sits on the left side of the back row by the window, and the girl on the right side by the window is older than the girl in the middle. The seats in the car are red. The adult woman opens her right car door and gets out of the car. After getting out, she turns around and closes the door. Both the boy in the back row and the girl in the middle adjust their sitting positions. The older girl on the right side of the back row leans forward and extends her left hand towards the front row.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on the inside of a car.", "type": "camera", "weight": 3 }, { "content": "There are four people inside the car.", "type": "attribute", "weight": 3 }, { "content": "Sitting on the left side of the front row is an adult woman wearing a light-colored coat.", "type": "attribute", "weight": 3 }, { "content": "In the back row, there are two girls and a boy.", "type": "attribute", "weight": 3 }, { "content": "The boy sits on the left side of the back row by the window.", "type": "attribute", "weight": 2 }, { "content": "The girl on the right side by the window is older than the girl in the middle.", "type": "attribute", "weight": 2 }, { "content": "The seats in the car are red.", "type": "scene", "weight": 1 }, { "content": "The adult woman opens her right car door and gets out of the car.", "type": "action", "weight": 3 }, { "content": "After getting out, the adult woman turns around and closes the door.", "type": "action", "weight": 3 }, { "content": "Both the boy in the back row and the girl in the middle adjust their sitting positions.", "type": "action", "weight": 3 }, { "content": "The older girl on the right side of the back row leans forward and extends her left hand towards the front row.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a night street scene, where dim lighting covers the street and only a few street lamps cast a faint glow. A woman wearing a trench coat runs forward with her back to the camera, heading towards a phone booth on the side of the street. Upon reaching the phone booth, the woman begins to dial a phone call.", "visual_elements": [ { "content": "The camera cuts to a night street scene.", "type": "camera", "weight": 3 }, { "content": "The dim lighting covers the street and only a few street lamps cast a faint glow.", "type": "scene", "weight": 2 }, { "content": "A woman wearing a trench coat runs forward with her back to the camera.", "type": "action", "weight": 3 }, { "content": "The woman runs towards a phone booth on the side of the street.", "type": "action", "weight": 3 }, { "content": "Upon reaching the phone booth, the woman begins to dial a phone call.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again to a scene in a laboratory or medical setting. A man dressed in a lab coat stands in the laboratory, holding a syringe in his hand. Beside him are two persons wearing work uniforms, each holding different metallic tools. 
In the background, there are metal cabinets filled with laboratory equipment.", "visual_elements": [ { "content": "The camera cuts again to a scene in a laboratory or medical setting.", "type": "camera", "weight": 3 }, { "content": "A man dressed in a lab coat stands in the laboratory.", "type": "attribute", "weight": 3 }, { "content": "The man is holding a syringe in his hand.", "type": "attribute", "weight": 2 }, { "content": "Beside him are two persons wearing work uniforms.", "type": "attribute", "weight": 3 }, { "content": "Persons wearing work uniforms are holding different metallic tools.", "type": "attribute", "weight": 2 }, { "content": "In the background, there are metal cabinets filled with laboratory equipment.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0345", "video_path": "VELOCITI/v_RWYM4Npp9rI_seg_120_130.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a middle-aged man in blue surgical attire, smiling at the camera. The man holds a silver revolver in his right hand, slowly pointing it toward the camera. The focus shifts from the man to the gun. In the background, various laboratory equipment and bottles are visible.\nThe camera then cuts to a man in a red jacket, crashing through a window from a high place, with glass shards flying.\nThe camera cuts back to the middle-aged man, who looks up and opens his mouth.\nThe camera pans to an interior laboratory scene, where two men hold metal clubs. One is wearing a green jacket, and the other is in gray work clothes. The man in the red jacket falls from a height, and the two men holding the clubs are also knocked down to the ground.\nThe camera cuts back to the middle-aged man, who looks panicked and stumbles a few steps backward.\nFinally, the camera returns to the man in the red jacket, who kneels on the ground, struggling to raise his upper body. A large Saint Bernard dog enters from the right side of the frame, approaches the man in the red jacket, and he reaches out to embrace the dog’s neck.", "events": [ { "event": "The video begins with the camera focused on a middle-aged man in blue surgical attire, smiling at the camera. The man holds a silver revolver in his right hand, slowly pointing it toward the camera. The focus shifts from the man to the gun. 
In the background, various laboratory equipment and bottles are visible.", "visual_elements": [ { "content": "The video begins with the camera focused on a middle-aged man in blue surgical attire.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man smiles at the camera.", "type": "attribute", "weight": 2 }, { "content": "The man holds a silver revolver in his right hand, slowly pointing it toward the camera.", "type": "action", "weight": 3 }, { "content": "The camera shifts its focus from the man to the gun.", "type": "camera", "weight": 3 }, { "content": "In the background, various laboratory equipment and bottles are visible.", "type": "scene", "weight": 1 } ] }, { "event": "The camera then cuts to a man in a red jacket, crashing through a window from a high place, with glass shards flying.", "visual_elements": [ { "content": "The camera then cuts to a man in a red jacket.", "type": "camera", "weight": 3 }, { "content": "The man in the red jacket crashes through a window from a high place.", "type": "action", "weight": 3 }, { "content": "Glass shards fly.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts back to the middle-aged man, who looks up and opens his mouth.", "visual_elements": [ { "content": "The camera cuts back to the middle-aged man.", "type": "camera", "weight": 3 }, { "content": "The man looks up and opens his mouth.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans to an interior laboratory scene, where two men hold metal clubs. One is wearing a green jacket, and the other is in gray work clothes. The man in the red jacket falls from a height, and the two men holding the clubs are also knocked down to the ground.", "visual_elements": [ { "content": "Then, the camera pans.", "type": "camera", "weight": 3 }, { "content": "Two men are holding metal clubs.", "type": "attribute", "weight": 2 }, { "content": "One of them is wearing a green jacket.", "type": "attribute", "weight": 2 }, { "content": "The other is wearing gray work clothes.", "type": "attribute", "weight": 2 }, { "content": "The man in the red jacket falls from a high place.", "type": "action", "weight": 3 }, { "content": "The two men with the clubs are also knocked down to the ground.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the middle-aged man, who looks panicked and stumbles a few steps backward.", "visual_elements": [ { "content": "The camera cuts back to the middle-aged man.", "type": "camera", "weight": 3 }, { "content": "The man looks panicked and stumbles backward.", "type": "attribute", "weight": 3 } ] }, { "event": "Finally, the camera returns to the man in the red jacket, who kneels on the ground, struggling to raise his upper body. 
A large Saint Bernard dog enters from the right side of the frame, approaches the man in the red jacket, and he reaches out to embrace the dog’s neck.", "visual_elements": [ { "content": "Finally, the camera returns to the man in the red jacket.", "type": "camera", "weight": 3 }, { "content": "The man in the red jacket kneels on the ground, struggling to raise his upper body.", "type": "action", "weight": 3 }, { "content": "A large Saint Bernard dog enters from the right side of the frame and approaches the man in the red jacket.", "type": "action", "weight": 3 }, { "content": "The man in the red jacket reaches out to embrace the dog’s neck.", "type": "action", "weight": 3 } ] } ], "n_events": 6, "n_elements": 22 }, { "index": "TUNA_0346", "video_path": "VELOCITI/v_RWYM4Npp9rI_seg_190_200.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a middle-aged man in a blue lab coat. The lab coat is open, revealing a light-striped shirt underneath. His upper body is filled with syringes. The man looks down at the syringes on his body. A syringe enters from the right side of the frame, aiming toward the man's chest.\nThe camera pans to focus on the man’s upper body. The man steps back a few steps and hits the wall behind him. On his left side is a sink, and on his right is a black radiator. The man's expression becomes blank, and he suddenly shows a strange smile, slowly sliding down to the floor. The camera follows him as it moves down.\nThe camera then cuts to a young boy with red-framed glasses sitting in the driver's seat of a car, gripping the steering wheel with both hands, suddenly widening his eyes while staring ahead. The interior is dimly lit, and another girl passenger is visible in the back seat. The young boy smiles, and the girl in the back seat opens her mouth in a scream. The boy then turns and prepares to open the door to get out of the car.\nThe camera cuts again. A man in a red jacket kneels on the ground, pressing down on a man in gray clothing. The man in the red jacket turns his head to look to the left side of the frame, and the man in gray looks back at him. The man in the red jacket then turns back and punches the man in gray in the face.", "events": [ { "event": "The video begins with the camera focused on a middle-aged man in a blue lab coat. The lab coat is open, revealing a light-striped shirt underneath. His upper body is filled with syringes. The man looks down at the syringes on his body. A syringe enters from the right side of the frame, aiming toward the man's chest.", "visual_elements": [ { "content": "The camera focuses on a middle-aged man in a blue lab coat.", "type": "camera", "weight": 3 }, { "content": "The man’s lab coat is open, revealing a light-striped shirt underneath.", "type": "attribute", "weight": 2 }, { "content": "His upper body is filled with syringes.", "type": "attribute", "weight": 3 }, { "content": "The man looks down at the syringes on his body.", "type": "attribute", "weight": 3 }, { "content": "A syringe enters from the right side of the frame, aiming toward the man’s chest.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans to focus on the man’s upper body. The man steps back a few steps and hits the wall behind him. On his left side is a sink, and on his right is a black radiator. 
The man's expression becomes blank, and he suddenly shows a strange smile, slowly sliding down to the floor. The camera follows him as it moves down.", "visual_elements": [ { "content": "The camera pans to focus on the man’s upper body.", "type": "camera", "weight": 3 }, { "content": "The man steps back a few steps and hits the wall behind him.", "type": "action", "weight": 3 }, { "content": "On his left is a sink, and on his right is a black radiator.", "type": "attribute", "weight": 2 }, { "content": "The man’s expression becomes blank, and he suddenly shows a strange smile.", "type": "attribute", "weight": 2 }, { "content": "The man slides slowly down the wall and falls to the ground.", "type": "action", "weight": 3 }, { "content": "The camera follows him as it moves down.", "type": "camera", "weight": 3 } ] }, { "event": "The camera then cuts to a young boy with red-framed glasses sitting in the driver's seat of a car, gripping the steering wheel with both hands, suddenly widening his eyes while staring ahead. The interior is dimly lit, and another girl passenger is visible in the back seat. The young boy smiles, and the girl in the back seat opens her mouth in a scream. The boy then turns and prepares to open the door to get out of the car.", "visual_elements": [ { "content": "The camera then cuts to a young boy with red-framed glasses.", "type": "camera", "weight": 3 }, { "content": "The young boy is sitting in the driver's seat of a car.", "type": "attribute", "weight": 3 }, { "content": "The young boy grips the steering wheel with both hands.", "type": "attribute", "weight": 3 }, { "content": "The young boy looks ahead and suddenly widens his eyes.", "type": "attribute", "weight": 2 }, { "content": "The interior of the car is dimly lit.", "type": "scene", "weight": 1 }, { "content": "Another girl passenger is visible in the back seat.", "type": "attribute", "weight": 3 }, { "content": "The young boy smiles.", "type": "action", "weight": 2 }, { "content": "The girl in the back seat opens her mouth and screams.", "type": "action", "weight": 3 }, { "content": "The young boy then turns around and prepares to open the door to get out of the car.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts again. A man in a red jacket kneels on the ground, pressing down on a man in gray clothing. The man in the red jacket turns his head to look to the left side of the frame, and the man in gray looks back at him. 
The man in the red jacket then turns back and punches the man in gray in the face.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "A man in a red jacket kneels on the ground, pressing down on a man in gray clothing.", "type": "attribute", "weight": 3 }, { "content": "The man in the red jacket turns his head to look to the left side of the frame.", "type": "attribute", "weight": 3 }, { "content": "The man in gray looks back at the man in the red jacket.", "type": "action", "weight": 3 }, { "content": "The man in the red jacket turns back and punches the man in gray in the face.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0347", "video_path": "VELOCITI/v_RWYM4Npp9rI_seg_75_85.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a man in a red jacket and khaki pants, standing on a glass skylight with a dark background. He moves forward, bending down to look below.\nThe camera then cuts to a scene below the skylight. A man in gray clothing holds a long pole against a large Saint Bernard dog, which is struggling. A man in green clothing climbs down the stairs and slowly approaches the dog. Various debris and tools are scattered on the ground, with a metal stretcher nearby.\nThe camera cuts again. A man in a blue lab coat opens the door and walks out of the room. There is a row of barriers in front of the door, and the man in the blue lab coat looks down at the scene below. The walls outside the room are adorned with anatomical charts and drawings of animal skeletons.\nThe camera cuts back to the scene with the man in gray and the man in green. The man in gray still holds the long pole, while the man in green holds a metal fork. Both men look up and to the right at the same time, with the man in green glancing at the man in gray. They continue manipulating the tools in their hands.\nFinally, the camera returns to the man in the blue lab coat, showing a shot from above his shoulders. He tilts his head, his expression serious. The door behind him gradually closes.", "events": [ { "event": "The video begins with the camera focused on a man in a red jacket and khaki pants, standing on a glass skylight with a dark background. He moves forward, bending down to look below.", "visual_elements": [ { "content": "The video begins with the camera focused on a man wearing a red jacket and khaki pants.", "type": "camera", "weight": 3 }, { "content": "The man in the red jacket is standing on a glass skylight.", "type": "attribute", "weight": 3 }, { "content": "The background is dark.", "type": "scene", "weight": 1 }, { "content": "The man in the red jacket moves forward.", "type": "action", "weight": 3 }, { "content": "The man in the red jacket bends down to look.", "type": "action", "weight": 3 } ] }, { "event": "The camera then cuts to a scene below the skylight. A man in gray clothing holds a long pole against a large Saint Bernard dog, which is struggling. A man in green clothing climbs down the stairs and slowly approaches the dog. 
Various debris and tools are scattered on the ground, with a metal stretcher nearby.", "visual_elements": [ { "content": "The camera then cuts to a scene below the skylight.", "type": "camera", "weight": 3 }, { "content": "A man in gray clothing is holding a long pole against a large Saint Bernard dog.", "type": "action", "weight": 3 }, { "content": "The dog is struggling.", "type": "action", "weight": 3 }, { "content": "A man in green clothing climbs down the stairs nearby.", "type": "action", "weight": 2 }, { "content": "The man in green slowly approaches the dog.", "type": "action", "weight": 1 }, { "content": "Various debris and tools are scattered on the ground.", "type": "scene", "weight": 1 }, { "content": "A metal stretcher is nearby.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts again. A man in a blue lab coat opens the door and walks out of the room. There is a row of barriers in front of the door, and the man in the blue lab coat looks down at the scene below. The walls outside the room are adorned with anatomical charts and drawings of animal skeletons.", "visual_elements": [ { "content": "The camera then cuts.", "type": "camera", "weight": 3 }, { "content": "A man in a blue lab coat opens the door and steps out of the room.", "type": "action", "weight": 3 }, { "content": "There is a row of barriers in front of the door.", "type": "attribute", "weight": 2 }, { "content": "The man in the blue lab coat looks down at the scene below.", "type": "action", "weight": 3 }, { "content": "The walls outside the room are adorned with anatomical charts and drawings of animal skeletons.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts back to the scene with the man in gray and the man in green. The man in gray still holds the long pole, while the man in green holds a metal fork. Both men look up and to the right at the same time, with the man in green glancing at the man in gray. They continue manipulating the tools in their hands.", "visual_elements": [ { "content": "The camera cuts back to the scene with the man in gray and the man in green.", "type": "camera", "weight": 3 }, { "content": "The man in gray still holds the long pole, while the man in green holds a metal cross.", "type": "attribute", "weight": 3 }, { "content": "Both men look to their upper right at the same time.", "type": "action", "weight": 3 }, { "content": "The man in green glances at the man in gray.", "type": "action", "weight": 2 }, { "content": "Both men continue manipulating the tools in their hands.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera returns to the man in the blue lab coat, showing a shot from above his shoulders. He tilts his head, his expression serious. 
The door behind him gradually closes.", "visual_elements": [ { "content": "Finally, the camera returns to the man in the blue lab coat.", "type": "camera", "weight": 3 }, { "content": "The camera shows a shot from above the man's shoulders.", "type": "camera", "weight": 3 }, { "content": "The man in the blue lab coat tilts his head, his expression serious.", "type": "attribute", "weight": 1 }, { "content": "The door behind the man in the blue lab coat gradually closes.", "type": "scene", "weight": 1 } ] } ], "n_events": 5, "n_elements": 26 }, { "index": "TUNA_0348", "video_path": "VELOCITI/v_RehVwEopIQ4_seg_120_130.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a wooden decorative piece, behind which a man is watching through it.\nThen, the camera cuts to an outdoor scene with blurred edges and a clear center. A white news van with the logo “KXBD 6 News at 6” is visible by the roadside. Next to the van is a set-up camera, and a military green vehicle passes in front of the lens. In the background, greenery and a pedestrian path are visible. A woman with a bag on her right shoulder and a bag in her left hand walks along the sidewalk. The camera moves to the right, where a person is standing by the front passenger door of the news van, making a phone call.\nNext, the camera cuts back indoors, where a man in a black suit suddenly turns to look inside. Behind him is an ornately decorated wall. The man in the suit turns again to look outside, then steps back while closing the door in front of him. He then turns and walks further into the room.", "events": [ { "event": "The video begins with the camera focused on a wooden decorative piece, behind which a man is watching through it.", "visual_elements": [ { "content": "The video begins with the camera focused on a wooden decorative piece.", "type": "camera", "weight": 3 }, { "content": "Behind the wooden decoration, there is a man.", "type": "attribute", "weight": 3 }, { "content": "The man looks outside through the wooden decoration.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera cuts to an outdoor scene with blurred edges and a clear center. A white news van with the logo “KXBD 6 News at 6” is visible by the roadside. Next to the van is a set-up camera, and a military green vehicle passes in front of the lens. In the background, greenery and a pedestrian path are visible. A woman with a bag on her right shoulder and a bag in her left hand walks along the sidewalk. 
The camera moves to the right, where a person is standing by the front passenger door of the news van, making a phone call.", "visual_elements": [ { "content": "Then, the camera cuts to an outdoor scene.", "type": "camera", "weight": 3 }, { "content": "The edges of the frame are blurred, with a clear center.", "type": "attribute", "weight": 3 }, { "content": "A white news van is visible by the roadside.", "type": "attribute", "weight": 3 }, { "content": "The van has the logo “KXBD 6 News at 6” on its side.", "type": "attribute", "weight": 2 }, { "content": "A set-up camera stands beside the van.", "type": "scene", "weight": 2 }, { "content": "A military green vehicle passes in front of the lens.", "type": "action", "weight": 3 }, { "content": "Greenery and a pedestrian walkway are visible in the background.", "type": "scene", "weight": 2 }, { "content": "A woman with a bag on her right shoulder and another in her left hand walks along the sidewalk.", "type": "action", "weight": 2 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "A person is standing outside the front passenger door of the news van.", "type": "attribute", "weight": 2 }, { "content": "The person is making a phone call.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera cuts back indoors, where a man in a black suit suddenly turns to look inside. Behind him is an ornately decorated wall. The man in the suit turns again to look outside, then steps back while closing the door in front of him. He then turns and walks further into the room.", "visual_elements": [ { "content": "Next, the camera cuts back to the interior scene.", "type": "camera", "weight": 3 }, { "content": "A man in a black suit suddenly turns around, looking inside.", "type": "attribute", "weight": 3 }, { "content": "Behind the man in the suit is an ornately decorated wall.", "type": "attribute", "weight": 2 }, { "content": "The man in the suit turns again to look outside.", "type": "action", "weight": 3 }, { "content": "The man then steps back while closing the door in front of him.", "type": "action", "weight": 3 }, { "content": "The man in the suit turns and walks further into the room.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0349", "video_path": "VELOCITI/v_RehVwEopIQ4_seg_35_45.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on an interior scene of a museum. In the background, a dinosaur skeleton exhibit is visible, along with some visitors and children. In the center of the frame, a middle-aged man in a dark suit holds a bag on his right arm and a gun in his left hand. He lowers his right shoulder, causing the bag on his right arm to fall off, and quickly moves forward. The camera follows his movement from left to right as he crosses the museum hall, runs to the entrance, and shakes the door handle, trying to open the door.\nThe camera cuts to another bald man in a blue shirt who, after opening the door, runs outside.\nNext, the camera cuts to a woman in a blue shirt exiting a car and quickly running to the front of the vehicle. She turns to face the camera and looks ahead, with a camera visible to her front right. The camera moves left, following her. 
Plants and buildings are visible in the background.\nThe camera cuts to an exterior view of a building, where the bald man in a blue shirt runs out from inside. He holds a black jacket in his left hand and clutches his abdomen with his right. He approaches the pillar to the right of the doorway and steadies himself with his left hand on the pillar.\nThe video ends with a cut to a scene in a surveillance room. The center monitor displays a full view of the museum entrance, while the monitor on the right shows a news anchor seated in front of a green screen.", "events": [ { "event": "The video begins with the camera focused on an interior scene of a museum. In the background, a dinosaur skeleton exhibit is visible, along with some visitors and children. In the center of the frame, a middle-aged man in a dark suit holds a bag on his right arm and a gun in his left hand. He lowers his right shoulder, causing the bag on his right arm to fall off, and quickly moves forward. The camera follows his movement from left to right as he crosses the museum hall, runs to the entrance, and shakes the door handle, trying to open the door.", "visual_elements": [ { "content": "The video begins with the camera focused on the interior of a museum.", "type": "camera", "weight": 3 }, { "content": "A dinosaur skeleton exhibit is visible in the background.", "type": "scene", "weight": 2 }, { "content": "Some visitors and children are also visible in the background.", "type": "scene", "weight": 2 }, { "content": "In the center of the frame, a middle-aged man in a dark suit holds a bag in his right hand and a gun in his left.", "type": "attribute", "weight": 3 }, { "content": "The man lowers his right shoulder, causing the bag to fall off his right arm.", "type": "action", "weight": 3 }, { "content": "The man in the dark suit quickly moves forward.", "type": "action", "weight": 3 }, { "content": "The camera follows the man’s movement from left to right.", "type": "camera", "weight": 3 }, { "content": "The man crosses the hall and reaches the glass door.", "type": "action", "weight": 3 }, { "content": "The man bends to look outside and shakes the door handle, trying to open the door.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to another bald man in a blue shirt who, after opening the door, runs outside.", "visual_elements": [ { "content": "The camera cuts to another bald man in a blue shirt.", "type": "camera", "weight": 3 }, { "content": "The bald man opens the door and runs outside.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to a woman in a blue shirt exiting a car and quickly running to the front of the vehicle. She turns to face the camera and looks ahead, with a camera visible to her front right. The camera moves left, following her. 
Plants and buildings are visible in the background.", "visual_elements": [ { "content": "Next, the camera cuts to a woman in a blue shirt exiting a car.", "type": "camera", "weight": 3 }, { "content": "The woman in the blue shirt quickly runs toward the front of the car.", "type": "action", "weight": 3 }, { "content": "The woman turns to face the camera and looks ahead.", "type": "action", "weight": 2 }, { "content": "A camera is visible to her front right.", "type": "attribute", "weight": 2 }, { "content": "The camera follows the woman as it moves left.", "type": "camera", "weight": 3 }, { "content": "Plants and buildings are visible in the background.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts to an exterior view of a building, where the bald man in a blue shirt runs out from inside. He holds a black jacket in his left hand and clutches his abdomen with his right. He approaches the pillar to the right of the doorway and steadies himself with his left hand on the pillar.", "visual_elements": [ { "content": "The camera cuts to an exterior view of a building.", "type": "camera", "weight": 3 }, { "content": "The bald man in the blue shirt runs out from inside the building.", "type": "action", "weight": 3 }, { "content": "The bald man holds a black jacket in his left hand and clutches his abdomen with his right.", "type": "attribute", "weight": 3 }, { "content": "The bald man approaches the pillar to the right of the doorway and steadies himself with his left hand.", "type": "action", "weight": 3 } ] }, { "event": "The video ends with a cut to a scene in a surveillance room. The center monitor displays a full view of the museum entrance, while the monitor on the right shows a news anchor seated in front of a green screen.", "visual_elements": [ { "content": "The video ends with a cut to a scene in a surveillance room.", "type": "camera", "weight": 3 }, { "content": "The center monitor displays a full view of the museum entrance.", "type": "attribute", "weight": 3 }, { "content": "The right monitor shows a news anchor seated in front of a green screen.", "type": "attribute", "weight": 3 } ] } ], "n_events": 5, "n_elements": 24 }, { "index": "TUNA_0350", "video_path": "VELOCITI/v_RehVwEopIQ4_seg_55_65.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a man sitting behind a news anchor desk. The man behind the news desk is dressed in a brown suit jacket, white shirt, and a patterned brown tie. The background is a green wall with the words “KXBD News at 6” printed on it. The man at the news desk speaks while pointing to his right ear with his right hand.\nNext, the camera cuts to the back view of another man dressed in a dark suit. The man in the dark suit rests his right hand on the door frame, looking outside, and repeatedly turns between looking inside and outside. While watching outside, he steps back, then turns and runs inside. A large oil painting depicting several figures and horses hangs on the wall inside. The camera follows the man as he moves left, pointing to a woman on the left side of the frame. The woman, dressed in a white top and black skirt, covers her mouth with her right hand and steps back twice. 
The camera follows the man in the dark suit as he moves right, stepping behind a counter and heading toward the right side of the frame.", "events": [ { "event": "The video begins with the camera focused on a man sitting behind a news anchor desk. The man behind the news desk is dressed in a brown suit jacket, white shirt, and a patterned brown tie. The background is a green wall with the words “KXBD News at 6” printed on it. The man at the news desk speaks while pointing to his right ear with his right hand.", "visual_elements": [ { "content": "The video begins with the camera focused on a man sitting behind a news anchor desk.", "type": "camera", "weight": 3 }, { "content": "The man behind the news desk is wearing a brown suit jacket, a white shirt, and a patterned brown tie.", "type": "attribute", "weight": 3 }, { "content": "The background is a green wall with the words “KXBD News at 6” written on it.", "type": "scene", "weight": 2 }, { "content": "The man behind the news desk speaks while pointing to his right ear with his right hand.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to the back view of another man dressed in a dark suit. The man in the dark suit rests his right hand on the door frame, looking outside, and repeatedly turns between looking inside and outside. While watching outside, he steps back, then turns and runs inside. A large oil painting depicting several figures and horses hangs on the wall inside. The camera follows the man as he moves left, pointing to a woman on the left side of the frame. The woman, dressed in a white top and black skirt, covers her mouth with her right hand and steps back twice. The camera follows the man in the dark suit as he moves right, stepping behind a counter and heading toward the right side of the frame.", "visual_elements": [ { "content": "Next, the camera cuts to the back view of another man.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a dark suit.", "type": "attribute", "weight": 2 }, { "content": "The man in the dark suit rests his right hand on the door frame, looking outside.", "type": "attribute", "weight": 3 }, { "content": "The man repeatedly turns, sometimes looking inside, sometimes outside.", "type": "action", "weight": 3 }, { "content": "The man steps back while looking outside.", "type": "action", "weight": 3 }, { "content": "The man turns and runs inside.", "type": "action", "weight": 3 }, { "content": "A large oil painting hangs on the wall inside the room.", "type": "scene", "weight": 2 }, { "content": "The painting depicts several figures and horses.", "type": "scene", "weight": 2 }, { "content": "The camera follows the man as he moves to the left.", "type": "camera", "weight": 3 }, { "content": "The man in the dark suit points to a woman on the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The woman is dressed in a white top and a black skirt.", "type": "attribute", "weight": 2 }, { "content": "The woman in the white top covers her mouth with her right hand and steps back twice.", "type": "action", "weight": 2 }, { "content": "The camera follows the man in the dark suit as he moves to the right.", "type": "camera", "weight": 3 }, { "content": "The man in the dark suit walks behind the counter.", "type": "action", "weight": 3 }, { "content": "The man walks toward the right side of the frame.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 19 }, { "index": "TUNA_0351", "video_path": 
"VELOCITI/v_RyZ-saoiIzY_seg_15_25.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a focus on a young woman in tattered clothing, kneeling on one knee with her head down. Thick fog swirls behind her. The background shows a large industrial building with metal structures and windows, allowing light from outside to filter through.\nThe camera zooms in as the young woman slowly lifts her head, mouth open in a roar. She looks toward her front right, closes her mouth, and pulls off a metal strip tied to her right arm with her left hand.\nThe camera cuts to a dimly lit interior scene where a man in black clothing rushes in from a door on the left side of the frame. The man runs forward quickly, and the camera follows him to the right. A blonde woman follows closely behind him. The man then stops and turns to look at the blonde woman. She is dressed in a white undergarment and dark coat. After facing each other, they both glance back, then turn and continue running forward.\nThe camera cuts back to the industrial building, showing an overhead view of the young woman from above her shoulders. She stands still, slowly lifting her head.", "events": [ { "event": "The video begins with a focus on a young woman in tattered clothing, kneeling on one knee with her head down. Thick fog swirls behind her. The background shows a large industrial building with metal structures and windows, allowing light from outside to filter through.", "visual_elements": [ { "content": "The video begins with a focus on a young woman in tattered clothing.", "type": "camera", "weight": 3 }, { "content": "The young woman has her head down, kneeling on one knee.", "type": "attribute", "weight": 3 }, { "content": "Thick fog swirls behind the young woman.", "type": "scene", "weight": 2 }, { "content": "The background is a large industrial building.", "type": "scene", "weight": 2 }, { "content": "There are metal structures and windows all around.", "type": "scene", "weight": 2 }, { "content": "Light from outside is visible through the windows.", "type": "scene", "weight": 1 } ] }, { "event": "The camera zooms in as the young woman slowly lifts her head, mouth open in a roar. She looks toward her front right, closes her mouth, and pulls off a metal strip tied to her right arm with her left hand.", "visual_elements": [ { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The young woman slowly lifts her head, her mouth wide open in a roar.", "type": "action", "weight": 3 }, { "content": "The young woman looks to her front right and closes her mouth.", "type": "action", "weight": 3 }, { "content": "The young woman pulls off a metal strip bound to her right arm with her left hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a dimly lit interior scene where a man in black clothing rushes in from a door on the left side of the frame. The man runs forward quickly, and the camera follows him to the right. A blonde woman follows closely behind him. The man then stops and turns to look at the blonde woman. She is dressed in a white undergarment and dark coat. 
After facing each other, they both glance back, then turn and continue running forward.", "visual_elements": [ { "content": "The camera cuts to a dimly lit interior scene.", "type": "camera", "weight": 3 }, { "content": "A man in black clothing rushes in from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The man in black quickly runs forward.", "type": "action", "weight": 3 }, { "content": "The camera follows the man as he moves to the right.", "type": "camera", "weight": 3 }, { "content": "A blonde woman follows the man from behind.", "type": "action", "weight": 3 }, { "content": "The man in black stops, turning to look at the blonde woman behind him.", "type": "action", "weight": 3 }, { "content": "The blonde woman is wearing a white undergarment and a dark coat.", "type": "attribute", "weight": 2 }, { "content": "After facing each other, they both look behind them.", "type": "action", "weight": 3 }, { "content": "They then turn around and continue running forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the industrial building, showing an overhead view of the young woman from above her shoulders. She stands still, slowly lifting her head.", "visual_elements": [ { "content": "The camera cuts back to the industrial building.", "type": "camera", "weight": 3 }, { "content": "The camera shows an overhead view from above the woman’s shoulders.", "type": "camera", "weight": 3 }, { "content": "The young woman stands still, slowly lifting her head.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 22 }, { "index": "TUNA_0352", "video_path": "VELOCITI/v_RyZ-saoiIzY_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a group of people running forward, escaping from a building behind them. The group includes men and women of various ages, all wearing modern clothing. In the background, the stone walls and steps of the building are visible, with thick gray smoke billowing out.\nNext, the camera cuts to a dimly lit underground crypt, filled with stone coffins and statues. The green lighting inside the crypt is faint, and several people are moving around inside.\nThe camera cuts to a close-up of several decayed corpses placed on a platform covered with plastic sheets, with different numbered tags above each corpse's head. The camera moves upward as a man in a dark coat enters from the left side, raising a cell phone in his left hand to his left ear.\nFinally, the camera cuts to the interior of an industrial-style building, with the camera zooming in and moving upward. A man in black clothing is talking on the phone while quickly ascending a staircase. In the background, there are pipes and equipment. After reaching the top, the man turns and continues walking to the right, with other people visible in the background, either walking or working.", "events": [ { "event": "The video begins with the camera focused on a group of people running forward, escaping from a building behind them. The group includes men and women of various ages, all wearing modern clothing. 
In the background, the stone walls and steps of the building are visible, with thick gray smoke billowing out.", "visual_elements": [ { "content": "The video begins with the camera focused on a group of people.", "type": "camera", "weight": 3 }, { "content": "The group is running forward, escaping from a building behind them.", "type": "action", "weight": 3 }, { "content": "The group includes men and women of various ages, all wearing modern clothing.", "type": "attribute", "weight": 2 }, { "content": "The stone walls and steps of the building are visible in the background.", "type": "scene", "weight": 2 }, { "content": "Thick gray smoke is billowing out of the building.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera cuts to a dimly lit underground crypt, filled with stone coffins and statues. The green lighting inside the crypt is faint, and several people are moving around inside.", "visual_elements": [ { "content": "Next, the camera cuts to a dimly lit underground crypt.", "type": "camera", "weight": 3 }, { "content": "The crypt contains numerous stone coffins and statues.", "type": "attribute", "weight": 3 }, { "content": "The green lighting in the crypt is dim.", "type": "attribute", "weight": 2 }, { "content": "Several people are moving around in the crypt.", "type": "action", "weight": 1 } ] }, { "event": "The camera cuts to a close-up of several decayed corpses placed on a platform covered with plastic sheets, with different numbered tags above each corpse's head. The camera moves upward as a man in a dark coat enters from the left side, raising a cell phone in his left hand to his left ear.", "visual_elements": [ { "content": "The camera cuts to a close-up of several decayed corpses.", "type": "camera", "weight": 3 }, { "content": "The corpses are placed on a platform covered with plastic sheets.", "type": "attribute", "weight": 3 }, { "content": "Different numbered tags are attached above each corpse's head on the platform.", "type": "attribute", "weight": 2 }, { "content": "The camera moves upward.", "type": "camera", "weight": 3 }, { "content": "A man in a dark coat enters the frame from the left side.", "type": "action", "weight": 3 }, { "content": "The man raises a cell phone in his left hand to his left ear.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera cuts to the interior of an industrial-style building, with the camera zooming in and moving upward. A man in black clothing is talking on the phone while quickly ascending a staircase. In the background, there are pipes and equipment. 
After reaching the top, the man turns and continues walking to the right, with other people visible in the background, either walking or working.", "visual_elements": [ { "content": "Finally, the camera cuts to the interior of an industrial-style building.", "type": "camera", "weight": 3 }, { "content": "The camera continuously zooms in and moves upward.", "type": "camera", "weight": 3 }, { "content": "A man dressed in black is talking on the phone while quickly ascending a staircase.", "type": "action", "weight": 3 }, { "content": "Pipes and equipment are visible in the background.", "type": "scene", "weight": 1 }, { "content": "After reaching the top, the man in black turns and continues walking to the right side of the frame.", "type": "action", "weight": 3 }, { "content": "Other people are visible in the background, either walking or working.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0353", "video_path": "VELOCITI/v_S-2cloMm4Lk_seg_40_50.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a male character standing in front of a brick wall covered in posters. The man looks up as someone falls from above. In the background, several armed fighters are visible, along with laser beams and shooting sparks. The scene is dimly lit.\nThe camera quickly cuts to a blonde female character with pigtails, with pink and blue tips. She is dressed in vibrant clothing, wielding a baseball bat and swinging it forcefully at an enemy.\nA close-up shot shows the female character bending backward to dodge an enemy's axe. Firelight and flying debris are visible in the background.\nNext, the camera cuts to another male character wearing a black jacket, engaged in a standoff with an enemy in front of him. The scene is filled with firelight and smoke, with others fighting in the background.\nThe camera cuts to the man in the black jacket being struck by the enemy, causing his head to tilt backward. His brows are furrowed, and he grits his teeth.\nThe camera cuts as the man in the black jacket turns his head and kicks the enemy behind him before moving forward.\nThe camera cuts to a medium close-up of the man in the black jacket as he pulls out two blades from inside his jacket.", "events": [ { "event": "The video begins with the camera focused on a male character standing in front of a brick wall covered in posters. The man looks up as someone falls from above. In the background, several armed fighters are visible, along with laser beams and shooting sparks. The scene is dimly lit.", "visual_elements": [ { "content": "The video begins with the camera focused on a male character.", "type": "camera", "weight": 3 }, { "content": "The male character stands in front of a brick wall covered in posters.", "type": "attribute", "weight": 2 }, { "content": "The man looks up.", "type": "action", "weight": 2 }, { "content": "Someone falls from the top of the frame.", "type": "action", "weight": 2 }, { "content": "Armed fighters are visible fighting in the background.", "type": "scene", "weight": 2 }, { "content": "Laser beams and shooting sparks are visible in the background.", "type": "scene", "weight": 2 }, { "content": "The scene is dimly lit.", "type": "scene", "weight": 1 } ] }, { "event": "The camera quickly cuts to a blonde female character with pigtails, with pink and blue tips. 
She is dressed in vibrant clothing, wielding a baseball bat and swinging it forcefully at an enemy.", "visual_elements": [ { "content": "The camera quickly cuts to a blonde female character with pigtails.", "type": "camera", "weight": 3 }, { "content": "The tips of her pigtails are pink and blue.", "type": "attribute", "weight": 2 }, { "content": "The female character is dressed in vibrant clothing.", "type": "attribute", "weight": 1 }, { "content": "The female character holds a baseball bat.", "type": "attribute", "weight": 2 }, { "content": "She swings it forcefully at an enemy.", "type": "action", "weight": 3 } ] }, { "event": "A close-up shot shows the female character bending backward to dodge an enemy's axe. Firelight and flying debris are visible in the background.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The female character bends backward to dodge the enemy's axe.", "type": "action", "weight": 3 }, { "content": "Firelight and flying debris are visible in the background.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera cuts to another male character wearing a black jacket, engaged in a standoff with an enemy in front of him. The scene is filled with firelight and smoke, with others fighting in the background.", "visual_elements": [ { "content": "Next, the camera cuts to another male character.", "type": "camera", "weight": 3 }, { "content": "The male character is wearing a black jacket.", "type": "attribute", "weight": 2 }, { "content": "The man in the black jacket is engaged in a standoff with an enemy in front of him.", "type": "action", "weight": 3 }, { "content": "The scene is filled with firelight and smoke.", "type": "scene", "weight": 1 }, { "content": "There are others fighting in the background.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts to the man in the black jacket being struck by the enemy, causing his head to tilt backward. His brows are furrowed, and he grits his teeth.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The man in the black jacket is hit by the enemy, causing his head to tilt backward.", "type": "action", "weight": 3 }, { "content": "The man in the black jacket furrows his brow and grits his teeth.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts as the man in the black jacket turns his head and kicks the enemy behind him before moving forward.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The man in the black jacket turns and kicks the enemy behind him before moving forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a medium close-up of the man in the black jacket as he pulls out two blades from inside his jacket.", "visual_elements": [ { "content": "The camera cuts to a medium close-up of the man in the black jacket.", "type": "camera", "weight": 3 }, { "content": "The man pulls out two blades from inside his jacket.", "type": "action", "weight": 3 } ] } ], "n_events": 7, "n_elements": 27 }, { "index": "TUNA_0354", "video_path": "VELOCITI/v_S32b64ns3fM_seg_40_50.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins in a dimly lit room. Several paintings hang on the walls, and a man and a woman stand in the center. 
The man on the left wears an orange sleeveless T-shirt and gray pants, looking down at something in his hand. The woman on the right is in a denim jacket and dark pants, holding a black bag. The camera pulls back, revealing a large bed in the room, covered with red sheets and blankets, with cash and a basketball on it. The woman takes two bundles of cash from her bag, and the man reaches out with his right hand to take them. The woman steps back, turns toward the door while tossing cash behind her. The man crouches to pick up the cash, and the woman opens the door, exits, and closes it behind her. The man kneels on the bed and throws the cash onto it.\nThe camera cuts to an overhead shot of a basketball court, brightly lit with clean, polished floors. A man in a white basketball uniform stands at the free throw line. The referee approaches, hands the player the ball, then steps back while looking to his right front. The camera moves right, revealing another player in a white basketball uniform on the right side of the frame.", "events": [ { "event": "The video begins in a dimly lit room. Several paintings hang on the walls, and a man and a woman stand in the center. The man on the left wears an orange sleeveless T-shirt and gray pants, looking down at something in his hand. The woman on the right is in a denim jacket and dark pants, holding a black bag. The camera pulls back, revealing a large bed in the room, covered with red sheets and blankets, with cash and a basketball on it. The woman takes two bundles of cash from her bag, and the man reaches out with his right hand to take them. The woman steps back, turns toward the door while tossing cash behind her. The man crouches to pick up the cash, and the woman opens the door, exits, and closes it behind her. 
The man kneels on the bed and throws the cash onto it.", "visual_elements": [ { "content": "The camera focuses on a dimly lit room.", "type": "camera", "weight": 3 }, { "content": "Several paintings hang on the walls.", "type": "attribute", "weight": 1 }, { "content": "A man and a woman stand in the center of the room.", "type": "attribute", "weight": 3 }, { "content": "The man on the left is wearing an orange sleeveless T-shirt and gray pants.", "type": "attribute", "weight": 2 }, { "content": "The man looks down at something in his hand.", "type": "attribute", "weight": 2 }, { "content": "The woman on the right is wearing a denim jacket and dark pants.", "type": "attribute", "weight": 2 }, { "content": "The woman is holding a black bag.", "type": "attribute", "weight": 2 }, { "content": "The camera pulls back.", "type": "camera", "weight": 3 }, { "content": "A large bed appears in the frame.", "type": "attribute", "weight": 2 }, { "content": "The bed is covered with red sheets and blankets.", "type": "scene", "weight": 2 }, { "content": "There are stacks of cash and a basketball on the bed.", "type": "scene", "weight": 2 }, { "content": "The woman takes two bundles of cash from the black bag.", "type": "action", "weight": 3 }, { "content": "The man extends his right hand to take the cash.", "type": "action", "weight": 2 }, { "content": "The woman takes two steps back.", "type": "action", "weight": 3 }, { "content": "The woman turns and heads toward the door, tossing the cash onto the floor behind her.", "type": "action", "weight": 3 }, { "content": "The man crouches to pick up the cash on the floor.", "type": "action", "weight": 3 }, { "content": "The woman opens the door, exits the room, and closes it behind her.", "type": "action", "weight": 3 }, { "content": "The man kneels on the bed and tosses the cash onto it.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to an overhead shot of a basketball court, brightly lit with clean, polished floors. A man in a white basketball uniform stands at the free throw line. The referee approaches, hands the player the ball, then steps back while looking to his front right. 
The camera moves right, revealing another player in a white basketball uniform on the right side of the frame.", "visual_elements": [ { "content": "The camera cuts to an overhead view of a basketball court.", "type": "camera", "weight": 3 }, { "content": "The basketball court is brightly lit.", "type": "attribute", "weight": 1 }, { "content": "The floor is clean and polished.", "type": "attribute", "weight": 1 }, { "content": "A man in a white basketball uniform stands at the free throw line.", "type": "attribute", "weight": 3 }, { "content": "The referee walks up to the player and hands him the basketball.", "type": "action", "weight": 3 }, { "content": "The referee steps back while looking to his front right.", "type": "action", "weight": 3 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "Another player in a white basketball uniform appears on the right side of the frame.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 26 }, { "index": "TUNA_0355", "video_path": "VELOCITI/v_SBIpGdJA_5Q_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The scene opens with the camera zoomed in on a bar counter, where four people sit facing the right side of the scene. Closest to the camera is a middle-aged woman wearing a blue leopard-print top, with a pair of round white hollow earrings. The woman holds a cocktail in her left hand. To the woman’s right is a white-haired man wearing a blue shirt, holding a glass in his right hand. Next to the white-haired man is a man in a brown jacket, wearing a black hat and glasses, with a bottle of beer and a glass in front of him. Farthest from the camera is a man in a khaki blazer, resting his right arm on the bar.\nThe camera cuts to a man in a denim jacket with a blue bow tie, standing in front of a wooden background. The man speaks angrily to someone across from him.\nThe camera cuts to a bald man. The bald man, wearing a black sleeveless top, stands behind the bar. The man in the denim jacket suddenly stands up, grabs the bald man's collar, and pulls him closer. The bald man raises his left index finger, speaking to the man in the denim jacket.\nThe camera cuts to a Black woman in a black leather jacket with black dreadlocks. In front of her is a glass of red drink. She first smiles broadly, then raises her left hand to cover her mouth to hide her smile.\nThe camera returns to the man in the denim jacket and the bald man.", "events": [ { "event": "The scene opens with the camera zoomed in on a bar counter, where four people sit facing the right side of the scene. Closest to the camera is a middle-aged woman wearing a blue leopard-print top, with a pair of round white hollow earrings. The woman holds a cocktail in her left hand. To the woman’s right is a white-haired man wearing a blue shirt, holding a glass in his right hand. Next to the white-haired man is a man in a brown jacket, wearing a black hat and glasses, with a bottle of beer and a glass in front of him. 
Farthest from the camera is a man in a khaki blazer, resting his right arm on the bar.", "visual_elements": [ { "content": "The scene opens with the camera focused on a bar counter.", "type": "camera", "weight": 3 }, { "content": "Four people sit at the bar, all looking toward the right side of the scene.", "type": "attribute", "weight": 3 }, { "content": "Closest to the camera is a middle-aged woman wearing a blue leopard-print top.", "type": "attribute", "weight": 3 }, { "content": "The woman is wearing a pair of round white hollow earrings.", "type": "attribute", "weight": 2 }, { "content": "The woman holds a cocktail in her left hand.", "type": "attribute", "weight": 1 }, { "content": "To the woman’s right is a white-haired man wearing a blue shirt.", "type": "attribute", "weight": 3 }, { "content": "The white-haired man holds a glass in his right hand.", "type": "attribute", "weight": 1 }, { "content": "To the right of the white-haired man is a man in a brown jacket, wearing a black hat and glasses.", "type": "attribute", "weight": 3 }, { "content": "In front of the man in the brown jacket is a bottle of beer and a glass.", "type": "attribute", "weight": 2 }, { "content": "Farthest from the camera is a man wearing a khaki blazer.", "type": "attribute", "weight": 3 }, { "content": "The man rests his right arm on the bar.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a man in a denim jacket with a blue bow tie, standing in front of a wooden background. The man speaks angrily to someone across from him.", "visual_elements": [ { "content": "The camera cuts to a man in a denim jacket with a blue bow tie.", "type": "camera", "weight": 3 }, { "content": "Behind the man is a wooden background.", "type": "scene", "weight": 1 }, { "content": "The man speaks angrily to someone across from him.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a bald man. The bald man, wearing a black sleeveless top, stands behind the bar. The man in the denim jacket suddenly stands up, grabs the bald man's collar, and pulls him closer. The bald man raises his left index finger, speaking to the man in the denim jacket.", "visual_elements": [ { "content": "The camera cuts to a bald man.", "type": "camera", "weight": 3 }, { "content": "The bald man is wearing a black sleeveless top.", "type": "attribute", "weight": 2 }, { "content": "The bald man stands behind the bar.", "type": "attribute", "weight": 2 }, { "content": "The man in the denim jacket, with his back to the camera, suddenly stands up.", "type": "action", "weight": 3 }, { "content": "The man in the denim jacket grabs the bald man’s collar, pulling him closer.", "type": "action", "weight": 3 }, { "content": "The bald man raises his left index finger and speaks to the man in the denim jacket.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a Black woman in a black leather jacket with black dreadlocks. In front of her is a glass of red drink. 
She first smiles broadly, then raises her left hand to cover her mouth to hide her smile.", "visual_elements": [ { "content": "The camera cuts to a Black woman in a black leather jacket.", "type": "camera", "weight": 3 }, { "content": "The Black woman has black dreadlocks.", "type": "attribute", "weight": 2 }, { "content": "In front of the Black woman is a glass of red drink.", "type": "attribute", "weight": 1 }, { "content": "The Black woman first smiles broadly.", "type": "action", "weight": 2 }, { "content": "The Black woman then raises her left hand to cover her mouth, hiding her smile.", "type": "action", "weight": 3 } ] }, { "event": "The camera returns to the man in the denim jacket and the bald man.", "visual_elements": [ { "content": "The camera returns to the man in the denim jacket and the bald man.", "type": "camera", "weight": 3 } ] } ], "n_events": 5, "n_elements": 26 }, { "index": "TUNA_0356", "video_path": "VELOCITI/v_SEn179T1Apk_seg_135_145.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The scene opens with a woman in a black tank top sitting on the ground, with a stone wall in the background. The woman in the black tank top backs away and quickly stands up while turning around. There are scars on her shoulder.\nThe camera then pans to a person wearing full armor, holding a flail. The armored person swings the flail and smashes it onto the ground.\nThe camera cuts back to the woman in the black tank top, who flips over and then stands up, running forward. The woman in the black tank top has her back to the warrior.\nThe camera cuts to a high-angle view, where a group of people in brown and gray clothing stand on a stone wall looking down, shouting and waving their fists excitedly.\nThe camera cuts back to the battle scene, where the warrior and the woman face each other on an open area in front of a stone wall. The woman runs toward the right side of the scene, while the armored person stands in the shadows on the left side of the scene.\nThe camera quickly cuts to the back of the woman, facing the armored person who swings the flail in her direction.\nThe camera cuts to the front of the woman as she bends down to dodge the incoming flail.\nThe camera cuts again, showing the armored person swinging the flail to the left side of the scene. The woman bends down and moves from behind the armored person toward the right side of the scene.", "events": [ { "event": "The scene opens with a woman in a black tank top sitting on the ground, with a stone wall in the background. The woman in the black tank top backs away and quickly stands up while turning around. There are scars on her shoulder.", "visual_elements": [ { "content": "The camera focuses on a woman in a black tank top sitting on the ground.", "type": "camera", "weight": 3 }, { "content": "The background is a stone wall.", "type": "scene", "weight": 2 }, { "content": "The woman in the black tank top backs away and quickly stands up while turning around.", "type": "action", "weight": 3 }, { "content": "There are scars on the woman’s shoulder.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera then pans to a person wearing full armor, holding a flail. 
The armored person swings the flail and smashes it onto the ground.", "visual_elements": [ { "content": "The camera then pans to a person wearing full armor.", "type": "camera", "weight": 3 }, { "content": "The armored person holds a flail.", "type": "attribute", "weight": 3 }, { "content": "The armored person swings the flail and smashes it onto the ground.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the woman in the black tank top, who flips over and then stands up, running forward. The woman in the black tank top has her back to the warrior.", "visual_elements": [ { "content": "The camera cuts back to the woman in the black tank top.", "type": "camera", "weight": 3 }, { "content": "The woman in the black tank top flips over, stands up, and runs forward.", "type": "action", "weight": 3 }, { "content": "The woman in the black tank top has her back to the warrior.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a high-angle view, where a group of people in brown and gray clothing stand on a stone wall looking down, shouting and waving their fists excitedly.", "visual_elements": [ { "content": "The camera cuts to a high-angle view.", "type": "camera", "weight": 3 }, { "content": "A group of people in brown and gray clothing stands on the stone wall, looking down.", "type": "attribute", "weight": 3 }, { "content": "The people shout excitedly and wave their fists.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the battle scene, where the warrior and the woman face each other on an open area in front of a stone wall. The woman runs toward the right side of the scene, while the armored person stands in the shadows on the left side of the scene.", "visual_elements": [ { "content": "The camera cuts back to the battle scene.", "type": "camera", "weight": 3 }, { "content": "The warrior and the woman face each other in an open area in front of a stone wall.", "type": "attribute", "weight": 3 }, { "content": "The woman runs toward the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The armored person stands in the shadows on the left side of the scene.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera quickly cuts to the back of the woman, facing the armored person who swings the flail in her direction.", "visual_elements": [ { "content": "The camera quickly cuts to the back of the woman.", "type": "camera", "weight": 3 }, { "content": "The armored person in front of the woman swings the flail.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the front of the woman as she bends down to dodge the incoming flail.", "visual_elements": [ { "content": "The camera cuts to the front of the woman.", "type": "camera", "weight": 3 }, { "content": "The woman bends down to dodge the incoming flail.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again, showing the armored person swinging the flail to the left side of the scene. 
The woman bends down and moves from behind the armored person toward the right side of the scene.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The armored person swings the flail to the left side of the scene.", "type": "action", "weight": 3 }, { "content": "The woman bends down and moves from behind the armored person toward the right side of the scene.", "type": "action", "weight": 3 } ] } ], "n_events": 8, "n_elements": 24 }, { "index": "TUNA_0357", "video_path": "VELOCITI/v_SEn179T1Apk_seg_45_55.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The scene opens with a low-angle shot of two people facing off in an open area in front of a stone wall, covered with rubble and sharp wooden stakes. Blinding sunlight enters from the upper left corner of the scene. The tall stone wall is filled with onlookers, and several flags are planted on it.\nThe camera cuts to a medium-close shot of a young woman in black tight-fitting clothes.\nThe camera cuts to show the young woman facing an armored person covered in heavy armor, holding a spear and shield. The armored person thrusts the spear to attack the woman, who steps back to dodge the attack.\nThe camera cuts to a close-up of the upper body of the armored person, with armor covering the head and body.\nThe camera cuts to the woman, who jumps to dodge the swinging spear of the armored person.\nThe camera cuts to a medium-close shot of the armored person.\nThe camera cuts to show the full battle scene. The armored person strikes at the woman with the spear, and she bends back to dodge the attack. She then takes two steps back and turns around.\nThe camera cuts to three old men behind a railing, looking forward with serious expressions.\nThe camera cuts to a long shot, where the crowd on the wall waves their arms excitedly. Below the stone wall, the armored person raises the spear high, thrusting it toward the woman.\nThe camera cuts to the woman, who dodges the spear attack, falls to the ground, and grits her teeth as she backs away.\nThe camera cuts to a group of spectators behind the railing, shouting excitedly and waving their arms.\nThe camera cuts to the armored person raising the spear, preparing to attack the woman on the ground again.\nThe camera cuts to the woman lying on the ground, anxiously backing away.\nThe camera cuts to several old men on the wall, watching everything with indifferent expressions.", "events": [ { "event": "The scene opens with a low-angle shot of two people facing off in an open area in front of a stone wall, covered with rubble and sharp wooden stakes. Blinding sunlight enters from the upper left corner of the scene. 
The tall stone wall is filled with onlookers, and several flags are planted on it.", "visual_elements": [ { "content": "The camera captures a low-angle shot of two people facing off in an open area in front of a stone wall.", "type": "camera", "weight": 3 }, { "content": "The open area is filled with rubble and sharp stakes.", "type": "attribute", "weight": 1 }, { "content": "Blinding sunlight enters from the upper left corner of the scene.", "type": "attribute", "weight": 2 }, { "content": "The tall stone wall is crowded with onlookers.", "type": "scene", "weight": 2 }, { "content": "Several flags are planted on the stone wall.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a medium-close shot of a young woman in black tight-fitting clothes.", "visual_elements": [ { "content": "The camera cuts to a medium-close shot of a young woman in black tight-fitting clothes.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to show the young woman facing an armored person covered in heavy armor, holding a spear and shield. The armored person thrusts the spear to attack the woman, who steps back to dodge the attack.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The young woman and a person in heavy armor face each other.", "type": "attribute", "weight": 3 }, { "content": "The armored person holds a spear and a shield.", "type": "attribute", "weight": 3 }, { "content": "The armored person thrusts the spear to attack the woman.", "type": "action", "weight": 3 }, { "content": "The woman steps back to dodge the attack.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up of the upper body of the armored person, with armor covering the head and body.", "visual_elements": [ { "content": "The camera cuts to a close-up of the armored person’s upper body.", "type": "camera", "weight": 3 }, { "content": "Armor covers the person’s head and body.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to the woman, who jumps to dodge the swinging spear of the armored person.", "visual_elements": [ { "content": "The camera cuts to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman jumps to dodge the swinging spear of the armored person.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a medium-close shot of the armored person.", "visual_elements": [ { "content": "The camera cuts to a medium-close shot of the armored person.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to show the full battle scene. The armored person strikes at the woman with the spear, and she bends back to dodge the attack. 
She then takes two steps back and turns around.", "visual_elements": [ { "content": "The camera cuts to show the entire battle scene.", "type": "camera", "weight": 3 }, { "content": "The armored person strikes the woman with the spear.", "type": "action", "weight": 3 }, { "content": "The woman bends back to dodge the attack.", "type": "action", "weight": 3 }, { "content": "The woman takes two steps back and then turns around.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to three old men behind a railing, looking forward with serious expressions.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "Three old men stand behind a railing, looking forward with serious expressions.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to a long shot, where the crowd on the wall waves their arms excitedly. Below the stone wall, the armored person raises the spear high, thrusting it toward the woman.", "visual_elements": [ { "content": "The camera cuts to a long shot.", "type": "camera", "weight": 3 }, { "content": "The spectators on the wall wave their arms excitedly.", "type": "action", "weight": 3 }, { "content": "Below the stone wall, the armored person raises the spear high, thrusting it toward the woman.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the woman, who dodges the spear attack, falls to the ground, and grits her teeth as she backs away.", "visual_elements": [ { "content": "The camera cuts to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman dodges the spear attack.", "type": "action", "weight": 3 }, { "content": "The woman falls to the ground, gritting her teeth as she backs away.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a group of spectators behind the railing, shouting excitedly and waving their arms.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A group of spectators behind the railing shouts excitedly and waves their arms.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the armored person raising the spear, preparing to attack the woman on the ground again.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The armored person raises the spear, preparing to attack the woman on the ground again.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the woman lying on the ground, anxiously backing away.", "visual_elements": [ { "content": "The camera cuts to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman lies on the ground, anxiously backing away.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to several old men on the wall, watching everything with indifferent expressions.", "visual_elements": [ { "content": "The camera cuts to several old men on the wall.", "type": "camera", "weight": 3 }, { "content": "They watch everything with indifferent expressions.", "type": "attribute", "weight": 2 } ] } ], "n_events": 14, "n_elements": 36 }, { "index": "TUNA_0358", "video_path": "VELOCITI/v_SWAJPB_5rSs_seg_75_85.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.0, "resolution": { "width": 1280, "height": 720 }, "caption": "The scene opens with the camera focused on an old building with weathered outer walls. 
A man in a white shirt and dark pants stands on the balcony, holding the railing with one hand and gazing into the distance.\nThe camera then cuts to a courtyard with an old well in the center, surrounded by green plants. A man in a white shirt, black pants, and a khaki beret stands on the right side of the scene, with a gun slung over his shoulder. He looks down at the ground, then walks around the well toward the left side of the scene, exiting from the left.\nThe camera then cuts to a dimly lit indoor scene. A man wearing a beret sits at a table with a light bulb hanging above it. The man at the table is eating, and in the background, a man in a suit enters the scene, approaching the door behind the man who is eating. The suited man stands at the doorway, holding the door frame and speaking to the person inside. The subtitle \"Calo, where is Apollonia?\" appears at the bottom of the scene.", "events": [ { "event": "The scene opens with the camera focused on an old building with weathered outer walls. A man in a white shirt and dark pants stands on the balcony, holding the railing with one hand and gazing into the distance.", "visual_elements": [ { "content": "The camera focuses on an old building.", "type": "camera", "weight": 3 }, { "content": "The building's outer walls are weathered.", "type": "attribute", "weight": 1 }, { "content": "A man in a white shirt and dark pants stands on the balcony.", "type": "attribute", "weight": 3 }, { "content": "The man in dark pants holds the railing with one hand.", "type": "attribute", "weight": 3 }, { "content": "The man in dark pants gazes into the distance.", "type": "action", "weight": 2 } ] }, { "event": "The camera then cuts to a courtyard with an old well in the center, surrounded by green plants. A man in a white shirt, black pants, and a khaki beret stands on the right side of the scene, with a gun slung over his shoulder. He looks down at the ground, then walks around the well toward the left side of the scene, exiting from the left.", "visual_elements": [ { "content": "Then, the camera cuts to a courtyard.", "type": "camera", "weight": 3 }, { "content": "There is an old well in the center of the courtyard.", "type": "attribute", "weight": 2 }, { "content": "Green plants grow around the well.", "type": "attribute", "weight": 2 }, { "content": "A man in a white shirt, black pants, and a khaki beret stands on the right side of the scene.", "type": "attribute", "weight": 3 }, { "content": "The man has a gun slung over his shoulder.", "type": "attribute", "weight": 2 }, { "content": "The man in black pants looks down at the ground.", "type": "action", "weight": 2 }, { "content": "Then the man in black pants walks around the well, heading toward the left side of the scene.", "type": "action", "weight": 3 }, { "content": "The man exits from the left side of the scene.", "type": "action", "weight": 2 } ] }, { "event": "The camera then cuts to a dimly lit indoor scene. A man wearing a beret sits at a table with a light bulb hanging above it. The man at the table is eating, and in the background, a man in a suit enters the scene, approaching the door behind the man who is eating. The suited man stands at the doorway, holding the door frame and speaking to the person inside. 
The subtitle \"Calo, where is Apollonia?\" appears at the bottom of the scene.", "visual_elements": [ { "content": "Then the camera cuts to a dimly lit indoor scene.", "type": "camera", "weight": 3 }, { "content": "A man wearing a beret sits at a table.", "type": "attribute", "weight": 3 }, { "content": "A light bulb hanging above the table is lit.", "type": "scene", "weight": 2 }, { "content": "The man at the table is eating.", "type": "action", "weight": 3 }, { "content": "A man in a suit enters the scene, approaching the door behind the man who is eating.", "type": "action", "weight": 3 }, { "content": "The man in the suit stands at the doorway, holding the door frame and speaking to the person inside.", "type": "action", "weight": 3 }, { "content": "The subtitle \"Calo, where is Apollonia?\" appears at the bottom of the scene.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0359", "video_path": "VELOCITI/v_SbP_EGRp9Kw_seg_135_145.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a young man in a light blue shirt and dark undershirt, looking serious. The background is a blurred set of blinds.\nThen the camera cuts to a kitchen scene, where a middle-aged man sits at the kitchen island, wearing a blue shirt with his hands crossed on the table. A middle-aged woman stands by the kitchen counter in a red long-sleeve top, chopping vegetables. She turns her head to look over her right shoulder.\nThe camera cuts again to show the young man entering from outside the door, holding a black object. The young man walks down a hallway, then turns left. The camera pans right following the young man as he enters the kitchen. He walks to the kitchen island and places the black object on it. There are some kitchen utensils and condiment bottles on the island. The middle-aged man looks up at the young man, and the middle-aged woman turns to look at him.\nAt the end of the video, the camera focuses on the young man, who taps the black object with his left hand. The young man is speaking.", "events": [ { "event": "The video begins with the camera focusing on a young man in a light blue shirt and dark undershirt, looking serious. The background is a blurred set of blinds.", "visual_elements": [ { "content": "The video begins with the camera focusing on a young man", "type": "camera", "weight": 3 }, { "content": "The young man is wearing a light blue shirt and a dark undershirt.", "type": "attribute", "weight": 2 }, { "content": "The young man looks serious.", "type": "attribute", "weight": 1 }, { "content": "The background is a blurred set of blinds.", "type": "scene", "weight": 1 } ] }, { "event": "Then the camera cuts to a kitchen scene, where a middle-aged man sits at the kitchen island, wearing a blue shirt with his hands crossed on the table. A middle-aged woman stands by the kitchen counter in a red long-sleeve top, chopping vegetables. 
She turns her head to look over her right shoulder.", "visual_elements": [ { "content": "Then, the camera cuts to a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "A middle-aged man sits behind the kitchen island.", "type": "attribute", "weight": 3 }, { "content": "The middle-aged man is wearing a blue shirt.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged man has his hands crossed on the table.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the scene, a middle-aged woman stands by the kitchen counter.", "type": "attribute", "weight": 3 }, { "content": "The middle-aged woman is wearing a red long-sleeve top.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged woman is chopping vegetables.", "type": "action", "weight": 1 }, { "content": "The woman turns her head to look over her right shoulder.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again to show the young man entering from outside the door, holding a black object. The young man walks down a hallway, then turns left. The camera pans right following the young man as he enters the kitchen. He walks to the kitchen island and places the black object on it. There are some kitchen utensils and condiment bottles on the island. The middle-aged man looks up at the young man, and the middle-aged woman turns to look at him.", "visual_elements": [ { "content": "The camera cuts again, showing the young man entering from outside the door.", "type": "camera", "weight": 3 }, { "content": "The young man is holding a black object.", "type": "attribute", "weight": 3 }, { "content": "The young man walks down a hallway and turns left.", "type": "action", "weight": 3 }, { "content": "The camera pans right following the young man.", "type": "camera", "weight": 3 }, { "content": "The young man enters the kitchen.", "type": "action", "weight": 3 }, { "content": "The young man walks to the kitchen island and places the black object on it.", "type": "action", "weight": 3 }, { "content": "There are some kitchen utensils and condiment bottles on the island.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged man looks up at the young man.", "type": "action", "weight": 2 }, { "content": "The middle-aged woman turns around to look at the young man.", "type": "action", "weight": 3 } ] }, { "event": "At the end of the video, the camera focuses on the young man, who taps the black object with his left hand. The young man is speaking.", "visual_elements": [ { "content": "At the end of the video, the camera focuses on the young man.", "type": "camera", "weight": 3 }, { "content": "The young man taps the black object in front of him with his left hand.", "type": "action", "weight": 3 }, { "content": "The young man is speaking.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 24 }, { "index": "TUNA_0360", "video_path": "VELOCITI/v_SfN8z2mHAmw_seg_40_50.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video opens with the camera focused on a classroom. In the background, there is a bulletin board covered with various pictures and texts. Four people are in the classroom, three males and one female. The boy on the left is wearing a blue T-shirt, standing beside a piano with his hand on it. 
The girl in the middle, wearing a red short-sleeve T-shirt and jeans, stands in front of the piano, with one hand on the piano behind her and the other hand on her hip. The boy to the left of the girl is wearing a plaid shirt, while the boy on the far right is wearing a green hoodie and a yellow T-shirt, facing away from the camera. The camera pans right as the boy in the green hoodie moves to his right, making drumming motions. The boy in the blue T-shirt exits from the left side of the scene, and the girl in the middle and the boy in the plaid shirt look toward the boy making the drumming motions.\nThe camera cuts to a front view of the boy in the green hoodie, who is using drumsticks to hit a drum and xylophone in front of him. In the background, a blackboard, bookshelf, and lectern are visible.\nThen, the camera cuts back to the rear view of the boy in the green hoodie, moving left as he dances with his back to the camera.\nNext, the camera cuts to the front view of the boy in the green hoodie, who is dancing in the center of the classroom. In the background, two girls can be seen sitting on a desk watching. The boy in the green hoodie waves to the right side of the scene, and a boy in a blue T-shirt enters from the right, following him.\nThe camera cuts to show the boy in the green hoodie and the boy in the blue T-shirt jumping in the center of the classroom. The boy in the green hoodie turns to face the boy in the blue T-shirt.", "events": [ { "event": "The video opens with the camera focused on a classroom. In the background, there is a bulletin board covered with various pictures and texts. Four people are in the classroom, three males and one female. The boy on the left is wearing a blue T-shirt, standing beside a piano with his hand on it. The girl in the middle, wearing a red short-sleeve T-shirt and jeans, stands in front of the piano, with one hand on the piano behind her and the other hand on her hip. The boy to the left of the girl is wearing a plaid shirt, while the boy on the far right is wearing a green hoodie and a yellow T-shirt, facing away from the camera. The camera pans right as the boy in the green hoodie moves to his right, making drumming motions. 
The boy in the blue T-shirt exits from the left side of the scene, and the girl in the middle and the boy in the plaid shirt look toward the boy making the drumming motions.", "visual_elements": [ { "content": "The video begins with the camera focused on a classroom.", "type": "camera", "weight": 3 }, { "content": "In the background, there is a bulletin board.", "type": "scene", "weight": 2 }, { "content": "The bulletin board is covered with various pictures and text.", "type": "scene", "weight": 2 }, { "content": "There are four people in the classroom, three males and one female.", "type": "attribute", "weight": 3 }, { "content": "The boy on the left is wearing a blue T-shirt, standing beside a piano with his hand on it.", "type": "attribute", "weight": 3 }, { "content": "The girl in the middle is wearing a red short-sleeve T-shirt and jeans.", "type": "attribute", "weight": 3 }, { "content": "The girl stands in front of the piano, with one hand on the piano behind her and the other hand on her hip.", "type": "attribute", "weight": 2 }, { "content": "To the left of the girl, a boy is wearing a plaid shirt.", "type": "attribute", "weight": 3 }, { "content": "The boy on the far right is wearing a green hoodie and a yellow T-shirt.", "type": "attribute", "weight": 3 }, { "content": "The boy in the green hoodie is facing away from the camera.", "type": "attribute", "weight": 3 }, { "content": "The camera pans right.", "type": "camera", "weight": 3 }, { "content": "The boy in the green hoodie moves to his right, making drumming motions.", "type": "action", "weight": 3 }, { "content": "The boy in the blue T-shirt exits from the left side of the scene.", "type": "attribute", "weight": 2 }, { "content": "The girl in the middle and the boy in the plaid shirt look toward the boy making the drumming motions.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to a front view of the boy in the green hoodie, who is using drumsticks to hit a drum and xylophone in front of him. In the background, a blackboard, bookshelf, and lectern are visible.", "visual_elements": [ { "content": "The camera cuts to a front view of the boy in the green hoodie.", "type": "camera", "weight": 3 }, { "content": "The boy in the green hoodie is using drumsticks to hit a drum and xylophone in front of him.", "type": "action", "weight": 3 }, { "content": "In the background, a blackboard, bookshelf, and lectern are visible.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera cuts back to the rear view of the boy in the green hoodie, moving left as he dances with his back to the camera.", "visual_elements": [ { "content": "Then, the camera cuts back to the rear view of the boy in the green hoodie.", "type": "camera", "weight": 3 }, { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The boy in the green hoodie, with his back to the camera, dances while moving to his left.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to the front view of the boy in the green hoodie, who is dancing in the center of the classroom. In the background, two girls can be seen sitting on a desk watching. 
The boy in the green hoodie waves to the right side of the scene, and a boy in a blue T-shirt enters from the right, following him.", "visual_elements": [ { "content": "Next, the camera cuts to a front view of the boy in the green hoodie.", "type": "camera", "weight": 3 }, { "content": "The boy in the green hoodie is dancing in the center of the classroom.", "type": "action", "weight": 3 }, { "content": "In the background, two girls can be seen sitting on a desk watching.", "type": "scene", "weight": 2 }, { "content": "The boy in the green hoodie waves to the right side of the scene.", "type": "action", "weight": 3 }, { "content": "A boy in a blue T-shirt enters from the right, following the boy in the green hoodie.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to show the boy in the green hoodie and the boy in the blue T-shirt jumping in the center of the classroom. The boy in the green hoodie turns to face the boy in the blue T-shirt.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The boy in the green hoodie and the boy in the blue T-shirt jump in the center of the classroom.", "type": "action", "weight": 3 }, { "content": "The boy in the green hoodie turns to face the boy in the blue T-shirt.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 28 }, { "index": "TUNA_0361", "video_path": "VELOCITI/v_SfN8z2mHAmw_seg_55_65.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video opens with the camera focused on a young man in a yellow T-shirt and green jacket, performing dance moves. In the background, white curtains can be seen.\nThen, the camera cuts to reveal a man in a blue T-shirt. The man in the yellow T-shirt and the man in the blue T-shirt are performing a synchronized dance, moving in coordination. The man in the yellow T-shirt looks over his right shoulder.\nNext, the camera turns to a young woman in a green long-sleeve top, with her hair draped over her shoulders. She is smiling as she dances, moving her arms.\nThe camera cuts to a woman in a red top, swaying to the music.\nThe camera cuts again to show the man in the yellow T-shirt, the man in the blue T-shirt, and the woman in the green long-sleeve top all performing synchronized dance moves together.\nNext, the camera shifts to a side view of the woman in the green long-sleeve top, following her arm movement to the right, and the side profile of the man in the yellow T-shirt enters the frame.\nThe camera cuts to show the man in the yellow T-shirt standing in the center of the frame. Behind him, the woman in the green long-sleeve top turns and walks away. In the background, a woman in a yellow T-shirt, facing away from the camera, stands by a table.", "events": [ { "event": "The video opens with the camera focused on a young man in a yellow T-shirt and green jacket, performing dance moves. In the background, white curtains can be seen.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on a young male dressed in a yellow T-shirt and a green jacket.", "type": "camera", "weight": 3 }, { "content": "The man in the yellow T-shirt is performing dance moves.", "type": "action", "weight": 3 }, { "content": "White curtains can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera cuts to reveal a man in a blue T-shirt. 
The man in the yellow T-shirt and the man in the blue T-shirt are performing a synchronized dance, moving in coordination. The man in the yellow T-shirt looks over his right shoulder.", "visual_elements": [ { "content": "Then, the camera cuts.", "type": "camera", "weight": 3 }, { "content": "A man in a blue T-shirt appears in the scene.", "type": "attribute", "weight": 3 }, { "content": "The man in the yellow T-shirt and the man in the blue T-shirt are performing synchronized dance moves.", "type": "action", "weight": 3 }, { "content": "The man in the yellow T-shirt looks over his right shoulder.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera turns to a young woman in a green long-sleeve top, with her hair draped over her shoulders. She is smiling as she dances, moving her arms.", "visual_elements": [ { "content": "Next, the camera turns to a young woman in a green long-sleeve top.", "type": "camera", "weight": 3 }, { "content": "The woman in the green long-sleeve top has her hair draped over her shoulders.", "type": "attribute", "weight": 2 }, { "content": "The woman in the green long-sleeve top is smiling as she dances, moving her arms.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a woman in a red top, swaying to the music.", "visual_elements": [ { "content": "The camera cuts to a woman in a red top.", "type": "camera", "weight": 3 }, { "content": "The woman in the red top is swaying to the music.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again to show the man in the yellow T-shirt, the man in the blue T-shirt, and the woman in the green long-sleeve top all performing synchronized dance moves together.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "The man in the yellow T-shirt, the man in the blue T-shirt, and the woman in the green long-sleeve top all appear in the frame.", "type": "attribute", "weight": 3 }, { "content": "The three are performing synchronized dance moves.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera shifts to a side view of the woman in the green long-sleeve top, following her arm movement to the right, and the side profile of the man in the yellow T-shirt enters the frame.", "visual_elements": [ { "content": "Next, the camera shifts to a side view of the woman in the green long-sleeve top.", "type": "camera", "weight": 3 }, { "content": "The camera follows the woman’s arm movement to the right.", "type": "camera", "weight": 3 }, { "content": "The side profile of the man in the yellow T-shirt enters the frame.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to show the man in the yellow T-shirt standing in the center of the frame. Behind him, the woman in the green long-sleeve top turns and walks away. 
In the background, a woman in a yellow T-shirt, facing away from the camera, stands by a table.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The man in the yellow T-shirt is standing in the center of the frame.", "type": "attribute", "weight": 3 }, { "content": "Behind the man in the yellow T-shirt, the woman in the green long-sleeve top turns and walks away.", "type": "action", "weight": 3 }, { "content": "In the background, a woman in a yellow T-shirt is facing away from the camera, standing by a table.", "type": "scene", "weight": 2 } ] } ], "n_events": 7, "n_elements": 22 }, { "index": "TUNA_0362", "video_path": "VELOCITI/v_T7-sw9PhQec_seg_145_155.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a blonde woman wearing a black blazer and white shirt, holding a gun pointed at the ground in front of her. The background is a dimly lit indoor environment. The woman lowers her hands holding the gun and then turns to leave.\nThe camera cuts to the blonde woman running toward the camera. Several people are lying on the ground in the background. The camera moves backward and downward as the woman runs to a convertible, throws the gun onto the back seat, and quickly opens the passenger door. The camera pans left as the blonde woman sits in the passenger seat, placing her right hand on the side of the car and her left hand on the passenger seat. The blonde woman has blood around her nose and lips. She looks at the woman in the driver’s seat, who has a low ponytail and is facing away from the camera. The driver raises her right hand.", "events": [ { "event": "The video begins with the camera focused on a blonde woman wearing a black blazer and white shirt, holding a gun pointed at the ground in front of her. The background is a dimly lit indoor environment. The woman lowers her hands holding the gun and then turns to leave.", "visual_elements": [ { "content": "The camera focuses on a blonde woman.", "type": "camera", "weight": 3 }, { "content": "The blonde woman is wearing a black blazer and a white shirt.", "type": "attribute", "weight": 2 }, { "content": "The blonde woman is holding a gun pointed at the ground in front of her.", "type": "attribute", "weight": 3 }, { "content": "The background is a dimly lit indoor environment.", "type": "scene", "weight": 1 }, { "content": "The woman lowers her hands holding the gun and then turns to leave.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the blonde woman running toward the camera. Several people are lying on the ground in the background. The camera moves backward and downward as the woman runs to a convertible, throws the gun onto the back seat, and quickly opens the passenger door. The camera pans left as the blonde woman sits in the passenger seat, placing her right hand on the side of the car and her left hand on the passenger seat. The blonde woman has blood around her nose and lips. She looks at the woman in the driver’s seat, who has a low ponytail and is facing away from the camera. 
The driver raises her right hand.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The blonde woman is running toward the camera.", "type": "action", "weight": 3 }, { "content": "Several people are lying on the ground in the background.", "type": "scene", "weight": 1 }, { "content": "The camera moves backward and downward.", "type": "camera", "weight": 3 }, { "content": "The woman runs to a convertible.", "type": "action", "weight": 2 }, { "content": "The woman throws the gun onto the back seat of the car.", "type": "action", "weight": 3 }, { "content": "Then the woman quickly opens the passenger door.", "type": "action", "weight": 3 }, { "content": "The camera pans left.", "type": "camera", "weight": 3 }, { "content": "The blonde woman sits in the passenger seat.", "type": "action", "weight": 3 }, { "content": "She places her right hand on the side of the car and her left hand on the passenger seat.", "type": "attribute", "weight": 2 }, { "content": "The blonde woman has blood around her nose and lips.", "type": "attribute", "weight": 2 }, { "content": "The blonde woman looks at the woman in the driver’s seat.", "type": "action", "weight": 3 }, { "content": "The woman in the driver’s seat has a low ponytail and is facing away from the camera.", "type": "attribute", "weight": 3 }, { "content": "The woman in the driver’s seat raises her right hand.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 19 }, { "index": "TUNA_0363", "video_path": "VELOCITI/v_TFfe7ZgIVUc_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a man in a dark jacket with a white T-shirt underneath, standing at the doorway with a young girl behind him. The camera pans left as the man walks to the left side of the scene and turns to his left. The camera then pans right, bringing the young girl following the man into the center of the frame. She is wearing a red top and a plaid shirt, with a dark jacket over them. The girl smiles with her lips pressed together and then looks down. The camera pans left again as she turns to close the door. She brushes her hair with her hand, smiles slightly, and continues walking into the room, with other people appearing in the background.", "events": [ { "event": "The video begins with the camera focused on a man in a dark jacket with a white T-shirt underneath, standing at the doorway with a young girl behind him. The camera pans left as the man walks to the left side of the scene and turns to his left. The camera then pans right, bringing the young girl following the man into the center of the frame. She is wearing a red top and a plaid shirt, with a dark jacket over them. The girl smiles with her lips pressed together and then looks down. The camera pans left again as she turns to close the door. 
She brushes her hair with her hand, smiles slightly, and continues walking into the room, with other people appearing in the background.", "visual_elements": [ { "content": "The camera focuses on a man in a dark jacket with a white T-shirt underneath.", "type": "camera", "weight": 3 }, { "content": "The man is standing at the doorway.", "type": "attribute", "weight": 1 }, { "content": "A young girl is standing behind the man.", "type": "attribute", "weight": 3 }, { "content": "The camera pans left.", "type": "camera", "weight": 3 }, { "content": "The man walks to the left side of the scene.", "type": "action", "weight": 3 }, { "content": "The man turns to his left.", "type": "action", "weight": 3 }, { "content": "The camera pans right.", "type": "camera", "weight": 3 }, { "content": "The young girl following the man appears in the center of the frame.", "type": "attribute", "weight": 3 }, { "content": "The girl is wearing a red top and a plaid shirt, with a dark jacket over them.", "type": "attribute", "weight": 3 }, { "content": "The girl smiles with her lips pressed together and then looks down.", "type": "action", "weight": 3 }, { "content": "The camera pans left.", "type": "camera", "weight": 3 }, { "content": "The girl turns to close the door.", "type": "action", "weight": 3 }, { "content": "The girl brushes her hair with her hand and smiles slightly.", "type": "action", "weight": 3 }, { "content": "The girl continues walking into the room.", "type": "action", "weight": 3 }, { "content": "Other people appear in the background.", "type": "scene", "weight": 2 } ] } ], "n_events": 1, "n_elements": 15 }, { "index": "TUNA_0364", "video_path": "VELOCITI/v_TLsRWN6G77I_seg_50_60.mp4", "video_source": "VELOCITI", "visual_characteristic": "Low-Dynamic", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a low-angle shot of a curly-haired woman in a dark wool coat, standing with her arms crossed in front of her chest against a wooden wall. The camera slowly pans upward as the woman, arms still crossed, speaks while gradually walking forward. The camera moves downward, capturing only the upper part of her shoulders and above. The woman stops walking, kneeling or crouching down, and looks down at the person opposite her.", "events": [ { "event": "The video begins with a low-angle shot of a curly-haired woman in a dark wool coat, standing with her arms crossed in front of her chest against a wooden wall. The camera slowly pans upward as the woman, arms still crossed, speaks while gradually walking forward. The camera moves downward, capturing only the upper part of her shoulders and above. 
The woman stops walking, kneeling or crouching down, and looks down at the person opposite her.", "visual_elements": [ { "content": "The camera captures a low-angle shot of a curly-haired woman in a dark wool coat.", "type": "camera", "weight": 3 }, { "content": "The woman stands with her arms crossed in front of her chest against a wooden wall.", "type": "attribute", "weight": 3 }, { "content": "The camera slowly moves upward.", "type": "camera", "weight": 3 }, { "content": "The woman, with her arms still crossed, speaks while slowly walking forward.", "type": "action", "weight": 3 }, { "content": "The camera moves downward.", "type": "camera", "weight": 3 }, { "content": "The camera captures only from the woman's shoulders and above.", "type": "camera", "weight": 3 }, { "content": "The woman stops walking, kneeling or crouching down.", "type": "action", "weight": 3 }, { "content": "The woman looks down at the person opposite her.", "type": "action", "weight": 2 } ] } ], "n_events": 1, "n_elements": 8 }, { "index": "TUNA_0365", "video_path": "VELOCITI/v_TYJXBdLgPks_seg_220_230.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a group of audience members. A few women in the front row, dressed casually, sway rhythmically to the music with expressions of appreciation and fondness, their eyes fixed intently on the stage ahead.\nThe camera cuts to a male guitarist on stage, capturing his face from a low-angle shot. He is dressed in dark clothing, with his fingers moving rapidly over the guitar strings, and blurred stage lighting effects in the background.\nThe camera cuts to a medium-close shot of the lead singer at the microphone, wearing a light denim jacket, singing and playing guitar.\nThe camera cuts to a shot of the lead singer’s upper body, with audience arms waving in the foreground.\nThe camera returns to the first guitarist.\nThe camera then cuts to a wide shot showing both the lead singer and guitarist focused on their performance, with a large stage sign in the background that reads “OPEN MIC NIGHT EVERY TUESDAY AT 8PM.”\nThe camera cuts to a medium-close shot of another drummer in a black T-shirt.\nThe camera cuts to the guitarist.\nThe camera cuts to the lead singer.", "events": [ { "event": "The video begins with the camera focused on a group of audience members. A few women in the front row, dressed casually, sway rhythmically to the music with expressions of appreciation and fondness, their eyes fixed intently on the stage ahead.", "visual_elements": [ { "content": "The video begins with the camera focused on a group of audience members.", "type": "camera", "weight": 3 }, { "content": "In the front row are several women.", "type": "attribute", "weight": 2 }, { "content": "The women are dressed casually, swaying rhythmically.", "type": "action", "weight": 3 }, { "content": "The women look toward the stage with expressions of appreciation and fondness.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a male guitarist on stage, capturing his face from a low-angle shot. 
He is dressed in dark clothing, with his fingers moving rapidly over the guitar strings, and blurred stage lighting effects in the background.", "visual_elements": [ { "content": "The camera cuts to a male guitarist on stage.", "type": "camera", "weight": 3 }, { "content": "The camera captures his face from a low-angle shot.", "type": "camera", "weight": 3 }, { "content": "The guitarist is wearing dark clothing.", "type": "attribute", "weight": 2 }, { "content": "His fingers move rapidly over the guitar strings.", "type": "action", "weight": 3 }, { "content": "The background is a blurred stage lighting effect.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a medium-close shot of the lead singer at the microphone, wearing a light denim jacket, singing and playing guitar.", "visual_elements": [ { "content": "The camera cuts to a medium-close shot of the lead singer at the microphone.", "type": "camera", "weight": 3 }, { "content": "The lead singer, dressed in a light denim jacket, sings and plays guitar.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a shot of the lead singer’s upper body, with audience arms waving in the foreground.", "visual_elements": [ { "content": "The camera cuts to the lead singer’s upper body.", "type": "camera", "weight": 3 }, { "content": "The audience’s arms wave in the foreground.", "type": "scene", "weight": 2 } ] }, { "event": "The camera returns to the first guitarist.", "visual_elements": [ { "content": "The camera returns to the first guitarist.", "type": "camera", "weight": 3 } ] }, { "event": "The camera then cuts to a wide shot showing both the lead singer and guitarist focused on their performance, with a large stage sign in the background that reads “OPEN MIC NIGHT EVERY TUESDAY AT 8PM.”", "visual_elements": [ { "content": "The camera cuts to a wide shot.", "type": "camera", "weight": 3 }, { "content": "The lead singer and guitarist perform intently on stage.", "type": "action", "weight": 3 }, { "content": "A large sign is visible in the background.", "type": "scene", "weight": 2 }, { "content": "The sign reads “OPEN MIC NIGHT EVERY TUESDAY AT 8PM.”", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a medium-close shot of another drummer in a black T-shirt.", "visual_elements": [ { "content": "The camera cuts to a medium-close shot of another drummer in a black T-shirt.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to the guitarist.", "visual_elements": [ { "content": "The camera cuts to the guitarist.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to the lead singer.", "visual_elements": [ { "content": "The camera cuts to the lead singer.", "type": "camera", "weight": 3 } ] } ], "n_events": 9, "n_elements": 21 }, { "index": "TUNA_0366", "video_path": "VELOCITI/v_TYJXBdLgPks_seg_25_35.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a close-up of two women, with a blurred background and dim lighting. The woman closer to the camera is wearing green-framed glasses and has light-colored hair, while the woman beside her has curly hair and heavy makeup. The curly-haired woman turns and speaks to the woman with glasses.\nThe camera cuts to a stage where a man in a denim jacket stands at a microphone, holding a blue guitar and singing while playing. 
The stage background reads “OPEN MIC NIGHT AT 8 PM.” Behind him, a drummer in a black T-shirt plays intently.\nThe camera cuts again to a man in a black top hat and black clothing. He has a black necklace around his neck, is looking down with a cigarette in his mouth, and is making some sort of movement with his hands.\nNext, the camera cuts to a close-up of the man singing on stage, his expression focused as he sings and looks forward.\nThe camera cuts to the side profile of a man with spiked hair, with a blurred background.\nThe camera focuses on a hand holding an old-style mobile phone, displaying a text message: “Hi, at gig. It’s not that good. Band called something.” The time shown is 21:37.", "events": [ { "event": "The video begins with a close-up of two women, with a blurred background and dim lighting. The woman closer to the camera is wearing green-framed glasses and has light-colored hair, while the woman beside her has curly hair and heavy makeup. The curly-haired woman turns and speaks to the woman with glasses.", "visual_elements": [ { "content": "The video begins with a close-up of two women.", "type": "camera", "weight": 3 }, { "content": "The background is blurred, and the lighting is dim.", "type": "scene", "weight": 1 }, { "content": "The woman closer to the camera is wearing green-framed glasses and has light-colored hair.", "type": "attribute", "weight": 2 }, { "content": "The other woman has curly hair and heavy makeup.", "type": "attribute", "weight": 2 }, { "content": "The curly-haired woman turns to speak to the woman with glasses.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a stage where a man in a denim jacket stands at a microphone, holding a blue guitar and singing while playing. The stage background reads “OPEN MIC NIGHT AT 8 PM.” Behind him, a drummer in a black T-shirt plays intently.", "visual_elements": [ { "content": "The camera cuts to a stage.", "type": "camera", "weight": 3 }, { "content": "A man in a denim jacket is standing at the microphone.", "type": "attribute", "weight": 3 }, { "content": "The man is holding a blue guitar.", "type": "attribute", "weight": 3 }, { "content": "The man is singing while playing the guitar.", "type": "action", "weight": 3 }, { "content": "The stage background reads “OPEN MIC NIGHT AT 8 PM.”", "type": "scene", "weight": 2 }, { "content": "Behind the man, there is a drummer wearing a black T-shirt.", "type": "attribute", "weight": 2 }, { "content": "The drummer is playing the drums intently.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again to a man in a black top hat and black clothing. 
He has a black necklace around his neck, is looking down with a cigarette in his mouth, and is making some sort of movement with his hands.", "visual_elements": [ { "content": "The camera cuts again to a man in a black top hat.", "type": "camera", "weight": 3 }, { "content": "The man is dressed in black clothing.", "type": "attribute", "weight": 2 }, { "content": "The man has a black necklace around his neck.", "type": "attribute", "weight": 2 }, { "content": "The man is looking down with a cigarette in his mouth, making some sort of movement with his hands.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to a close-up of the man singing on stage, his expression focused as he sings and looks forward.", "visual_elements": [ { "content": "The camera cuts to a close-up of the man singing on stage.", "type": "camera", "weight": 1 }, { "content": "The man has a focused expression, singing while looking forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the side profile of a man with spiked hair, with a blurred background.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The side profile of a man with spiked hair appears in the frame.", "type": "attribute", "weight": 3 }, { "content": "The background is blurred.", "type": "scene", "weight": 2 } ] }, { "event": "The camera focuses on a hand holding an old-style mobile phone, displaying a text message: “Hi, at gig. It’s not that good. Band called something.” The time shown is 21:37.", "visual_elements": [ { "content": "The camera focuses on a hand.", "type": "camera", "weight": 2 }, { "content": "The hand is holding an old-style mobile phone.", "type": "attribute", "weight": 3 }, { "content": "A text message is displayed on the screen.", "type": "attribute", "weight": 2 }, { "content": "The message reads, “Hi, at gig. It’s not that good. Band called something.”", "type": "attribute", "weight": 2 }, { "content": "The time on the phone reads 21:37.", "type": "attribute", "weight": 2 } ] } ], "n_events": 6, "n_elements": 26 }, { "index": "TUNA_0367", "video_path": "VELOCITI/v_Tr3_HOXg4Ug_seg_50_60.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a baby sitting in an infant car seat. The baby is wearing a pair of white over-ear headphones, a gray long-sleeve outfit, and is strapped in. The baby has wide-open eyes and places both hands near their shoulders. In the background, a person dressed in black knocks another person to the ground and then approaches the baby. This person in black is holding a handgun in their right hand.\nThe camera cuts to a bald man dressed in black, picking up the infant car seat in front of him and walking forward. Suddenly, the man stops, and an airplane window is visible in the background.\nThe camera cuts to a low-angle shot of the man’s upper body, with his left hand holding onto a support above. He first sniffs the air with his nose, then looks down toward the camera, speaking.\nThe camera cuts to a front view of the baby, whose eyes look to their left. A blurry black outline is visible on the left side of the frame.", "events": [ { "event": "The video begins with the camera focused on a baby sitting in an infant car seat. The baby is wearing a pair of white over-ear headphones, a gray long-sleeve outfit, and is strapped in. 
The baby has wide-open eyes and places both hands near their shoulders. In the background, a person dressed in black knocks another person to the ground and then approaches the baby. This person in black is holding a handgun in their right hand.", "visual_elements": [ { "content": "The camera focuses on a baby sitting in an infant car seat.", "type": "camera", "weight": 3 }, { "content": "The baby is wearing a pair of white over-ear headphones.", "type": "attribute", "weight": 2 }, { "content": "The baby is dressed in a gray long-sleeve outfit and strapped in.", "type": "attribute", "weight": 2 }, { "content": "The baby has wide-open eyes, with both hands placed near their shoulders.", "type": "attribute", "weight": 3 }, { "content": "In the background, a person dressed in black knocks another person to the ground.", "type": "scene", "weight": 3 }, { "content": "Then, the person in black approaches the baby.", "type": "action", "weight": 3 }, { "content": "The person in black is holding a handgun in their right hand.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a bald man dressed in black, picking up the infant car seat in front of him and walking forward. Suddenly, the man stops, and an airplane window is visible in the background.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 2 }, { "content": "A bald man dressed in black picks up the infant car seat in front of him and walks forward.", "type": "action", "weight": 3 }, { "content": "The man stops walking.", "type": "action", "weight": 3 }, { "content": "An airplane window is visible in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a low-angle shot of the man’s upper body, with his left hand holding onto a support above. He first sniffs the air with his nose, then looks down toward the camera, speaking.", "visual_elements": [ { "content": "The camera cuts to a low-angle shot of the man’s upper body.", "type": "camera", "weight": 3 }, { "content": "The man holds onto a support above with his left hand.", "type": "attribute", "weight": 2 }, { "content": "The man first sniffs the air.", "type": "action", "weight": 2 }, { "content": "The man then looks down at the camera, speaking.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a front view of the baby, whose eyes look to their left. A blurry black outline is visible on the left side of the frame.", "visual_elements": [ { "content": "The camera returns to a front view of the baby.", "type": "camera", "weight": 3 }, { "content": "The baby’s eyes look to their left.", "type": "action", "weight": 3 }, { "content": "A blurry black outline is visible on the left side of the frame.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0368", "video_path": "VELOCITI/v_Tsk9gCcchg8_seg_110_120.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on an old stone building with two white windows on the exterior wall. Outside the windows, there are several protruding ledges. A man in a gray suit and white shirt crouches on the ledge outside the window. He holds the edge of the ledge and jumps toward the platform on the right side of the scene. The camera follows his movement to the right.\nThe camera cuts to a middle-aged man with a mustache, wearing a fur coat. 
He stands on an open field, looking up in surprise. In the background, beyond the field, there is a patch of barren trees.\nThe camera cuts back to the exterior of the stone building, capturing a low-angle shot as the man in the gray suit jumps down from a high ledge. His figure is obscured by an outer wall after he lands.\nThe camera cuts to a close-up of a man with long curly hair, holding onto a stone wall in front of him and looking upward with a tense, focused expression. He then bends down and looks back. The camera follows his movement downward.\nThe camera returns to the man in the fur coat.\nThe camera cuts to a rear view of the man in the fur coat, with a red car parked in front of him. In the background, below the stone wall's railing, the man with long curly hair is bent over, stepping back and turning. The man in the gray suit is crouching, preparing to jump from the railing.", "events": [ { "event": "The camera focuses on an old stone building with two white windows on the exterior wall. Outside the windows, there are several protruding ledges. A man in a gray suit and white shirt crouches on the ledge outside the window. He holds the edge of the ledge and jumps toward the platform on the right side of the scene. The camera follows his movement to the right.", "visual_elements": [ { "content": "The camera focuses on an ancient stone building.", "type": "camera", "weight": 3 }, { "content": "There are two white windows on the exterior wall of the building.", "type": "attribute", "weight": 2 }, { "content": "Outside the windows, there are several protruding ledges.", "type": "attribute", "weight": 2 }, { "content": "A man in a gray suit and white shirt crouches on the ledge outside the window.", "type": "attribute", "weight": 3 }, { "content": "The man holds onto the edge of the ledge and jumps toward the platform on the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The camera follows his movement to the right.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to a middle-aged man with a mustache, wearing a fur coat. He stands on an open field, looking up in surprise. In the background, beyond the field, there is a patch of barren trees.", "visual_elements": [ { "content": "The camera cuts to a middle-aged man with a mustache, wearing a fur coat.", "type": "camera", "weight": 3 }, { "content": "The man stands on an open field.", "type": "attribute", "weight": 2 }, { "content": "The man looks up ahead with a surprised expression.", "type": "action", "weight": 3 }, { "content": "In the background, beyond the field, there is a patch of barren trees.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts back to the exterior of the stone building, capturing a low-angle shot as the man in the gray suit jumps down from a high ledge. His figure is obscured by an outer wall after he lands.", "visual_elements": [ { "content": "The camera cuts back to the exterior of the stone building.", "type": "camera", "weight": 3 }, { "content": "The camera captures a low-angle shot.", "type": "camera", "weight": 3 }, { "content": "The man in the gray suit jumps down from a high ledge.", "type": "action", "weight": 3 }, { "content": "His figure is obscured by an outer wall after he lands.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a close-up of a man with long curly hair, holding onto a stone wall in front of him and looking upward with a tense, focused expression. He then bends down and looks back. 
The camera follows his movement downward.", "visual_elements": [ { "content": "The camera cuts to a close-up of a man with long curly hair.", "type": "camera", "weight": 3 }, { "content": "The man holds onto the stone wall in front of him, looking up with a tense and focused expression.", "type": "attribute", "weight": 3 }, { "content": "The man bends down and looks back.", "type": "action", "weight": 3 }, { "content": "The camera follows his movement downward.", "type": "camera", "weight": 3 } ] }, { "event": "The camera returns to the man in the fur coat.", "visual_elements": [ { "content": "The camera returns to the man in the fur coat.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to a rear view of the man in the fur coat, with a red car parked in front of him. In the background, below the stone wall's railing, the man with long curly hair is bent over, stepping back and turning. The man in the gray suit is crouching, preparing to jump from the railing.", "visual_elements": [ { "content": "The camera cuts to a rear view of the man in the fur coat.", "type": "camera", "weight": 3 }, { "content": "A red car is parked in front of him.", "type": "attribute", "weight": 2 }, { "content": "In the background, below the stone wall’s railing, the man with long curly hair bends over, steps back, and turns.", "type": "action", "weight": 3 }, { "content": "The man in the gray suit crouches, preparing to jump from the railing.", "type": "attribute", "weight": 3 } ] } ], "n_events": 6, "n_elements": 23 }, { "index": "TUNA_0369", "video_path": "VELOCITI/v_Tsk9gCcchg8_seg_90_100.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with an overhead shot of a red vintage car parked on an open area in front of an ancient building. The ground is covered in gray gravel, with patches of green moss. Next to the red car are two people; one stands in the back seat of the car, and the other stands beside the car. The person standing beside the car moves to the back seat, reaches out with both hands, and lifts the person standing in the back seat down by their underarms.\nThe camera cuts to a close-up of the two men. The man standing by the car is wearing a black coat with a brown fur collar; he holds onto another man in a brown coat, who has black glasses and messy, curly hair. The man in the black coat removes the other man’s glasses and throws them away. In the background, there are arches and stone walls of an ancient building complex.\nThe camera cuts to a low-angle shot of a man in a dark coat and white shirt standing high on the building, looking down anxiously.\nThe camera cuts again to a full-body shot of the two men. The man in the black coat pins the man in the brown coat against the red car, with the brown coat man facing away from him. The man in the black coat retrieves a slingshot from the car, holds the brown coat man in place with his right hand, and strikes him on the backside with the slingshot.\nThe camera cuts to the man on the high part of the building, who looks surprised and tense.", "events": [ { "event": "The video begins with an overhead shot of a red vintage car parked on an open area in front of an ancient building. The ground is covered in gray gravel, with patches of green moss. Next to the red car are two people; one stands in the back seat of the car, and the other stands beside the car. 
The person standing beside the car moves to the back seat, reaches out with both hands, and lifts the person standing in the back seat down by their underarms.", "visual_elements": [ { "content": "The video begins with an overhead shot.", "type": "camera", "weight": 3 }, { "content": "A red vintage car is parked on an open area in front of an ancient building.", "type": "attribute", "weight": 3 }, { "content": "The ground is covered in gray gravel.", "type": "scene", "weight": 2 }, { "content": "Some parts of the ground are covered in green moss.", "type": "scene", "weight": 2 }, { "content": "There are two people beside the red car.", "type": "attribute", "weight": 3 }, { "content": "One person is standing in the back seat of the car.", "type": "attribute", "weight": 2 }, { "content": "Another person is standing next to the car.", "type": "attribute", "weight": 2 }, { "content": "The person standing next to the car moves to the back seat and extends both hands.", "type": "action", "weight": 3 }, { "content": "The person next to the car grabs the underarms of the person in the back seat and lifts them down.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up of the two men. The man standing by the car is wearing a black coat with a brown fur collar; he holds onto another man in a brown coat, who has black glasses and messy, curly hair. The man in the black coat removes the other man’s glasses and throws them away. In the background, there are arches and stone walls of an ancient building complex.", "visual_elements": [ { "content": "The camera cuts to a close-up of the two men.", "type": "camera", "weight": 3 }, { "content": "The man standing by the car is wearing a black coat with a brown fur collar.", "type": "attribute", "weight": 3 }, { "content": "The man in the black coat is holding another man in a brown coat, wearing black glasses, with messy, curly hair.", "type": "attribute", "weight": 3 }, { "content": "The man in the black coat removes the brown coat man’s glasses and throws them away.", "type": "action", "weight": 3 }, { "content": "In the background, there are arches and stone walls of an ancient building complex.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a low-angle shot of a man in a dark coat and white shirt standing high on the building, looking down anxiously.", "visual_elements": [ { "content": "The camera cuts to a low-angle shot.", "type": "camera", "weight": 3 }, { "content": "A man in a dark coat and white shirt stands high on the building.", "type": "attribute", "weight": 3 }, { "content": "The man looks down anxiously.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts again to a full-body shot of the two men. The man in the black coat pins the man in the brown coat against the red car, with the brown coat man facing away from him. 
The man in the black coat retrieves a slingshot from the car, holds the brown coat man in place with his right hand, and strikes him on the backside with the slingshot.", "visual_elements": [ { "content": "The camera cuts again to a full-body shot of the two men.", "type": "camera", "weight": 3 }, { "content": "The man in the black coat pins the man in the brown coat against the red car.", "type": "action", "weight": 3 }, { "content": "The man in the brown coat faces away from the man in the black coat.", "type": "attribute", "weight": 2 }, { "content": "The man in the black coat retrieves a slingshot from the car.", "type": "action", "weight": 3 }, { "content": "The man in the black coat holds down the brown coat man with his right hand and strikes his backside forcefully with the slingshot.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the man on the high part of the building, who looks surprised and tense.", "visual_elements": [ { "content": "The camera cuts to the man on the high part of the building.", "type": "camera", "weight": 3 }, { "content": "The man looks surprised and tense.", "type": "attribute", "weight": 3 } ] } ], "n_events": 5, "n_elements": 24 }, { "index": "TUNA_0370", "video_path": "VELOCITI/v_TwgvEloIXVc_seg_45_55.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on the side profile of a woman as she runs to the left side of the scene, shouting, with a blurred background.\nThe camera pans to a rear view of the woman, who is wearing a reddish-brown coat and has curly hair. She runs across the street, with a row of shops and buildings in the background, glancing to her left as she crosses. The camera pans left, following the woman’s gaze, and pedestrians are walking on both sides of the street. Then the camera pans right as the woman in the reddish-brown coat runs to a man sitting by the roadside, dressed in a blue jacket and brown pants, with a yellow dog pacing behind him. The woman bends down, grabbing the man’s left arm, but he struggles to pull his arm away. The camera moves up, showing a woman in a green top and beige pants running out from a store. The camera moves down as the man stands, slightly pulling his head back, resisting the touch of the woman in the reddish-brown coat. The woman in the green top watches them tensely from behind.", "events": [ { "event": "The camera focuses on the side profile of a woman as she runs to the left side of the scene, shouting, with a blurred background.", "visual_elements": [ { "content": "The camera focuses on the side profile of a woman.", "type": "camera", "weight": 3 }, { "content": "The woman runs to the left side of the scene, shouting.", "type": "action", "weight": 3 }, { "content": "The background is blurred.", "type": "scene", "weight": 1 } ] }, { "event": "The camera pans to a rear view of the woman, who is wearing a reddish-brown coat and has curly hair. She runs across the street, with a row of shops and buildings in the background, glancing to her left as she crosses. The camera pans left, following the woman’s gaze, and pedestrians are walking on both sides of the street. Then the camera pans right as the woman in the reddish-brown coat runs to a man sitting by the roadside, dressed in a blue jacket and brown pants, with a yellow dog pacing behind him. 
The woman bends down, grabbing the man’s left arm, but he struggles to pull his arm away. The camera moves up, showing a woman in a green top and beige pants running out from a store. The camera moves down as the man stands, slightly pulling his head back, resisting the touch of the woman in the reddish-brown coat. The woman in the green top watches them tensely from behind.", "visual_elements": [ { "content": "The camera pans to a rear view of the woman.", "type": "camera", "weight": 2 }, { "content": "The woman is wearing a reddish-brown coat with curly hair.", "type": "attribute", "weight": 2 }, { "content": "The woman runs across the street.", "type": "action", "weight": 3 }, { "content": "The background shows a row of shops and buildings.", "type": "scene", "weight": 2 }, { "content": "As the woman crosses the road, she looks to her left.", "type": "action", "weight": 3 }, { "content": "The camera pans left, following the woman’s gaze.", "type": "camera", "weight": 3 }, { "content": "Pedestrians are walking on both sides of the street.", "type": "attribute", "weight": 2 }, { "content": "Then, the camera pans right.", "type": "camera", "weight": 3 }, { "content": "The woman in the reddish-brown coat runs to a man sitting by the roadside.", "type": "action", "weight": 3 }, { "content": "The man is wearing a blue jacket and brown pants.", "type": "attribute", "weight": 2 }, { "content": "a yellow dog is pacing behind the man.", "type": "scene", "weight": 2 }, { "content": "The woman bends down, grabbing the man’s left arm.", "type": "action", "weight": 3 }, { "content": "The man struggles, trying to pull his arm away from the woman’s grasp.", "type": "action", "weight": 3 }, { "content": "The camera moves upward.", "type": "camera", "weight": 3 }, { "content": "A woman in a green top and beige pants runs out of a store.", "type": "action", "weight": 3 }, { "content": "The camera moves downward.", "type": "camera", "weight": 3 }, { "content": "The man stands, slightly pulling his head back, resisting the touch of the woman in the reddish-brown coat.", "type": "action", "weight": 3 }, { "content": "The woman in the green top watches them tensely from behind.", "type": "attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 21 }, { "index": "TUNA_0371", "video_path": "VELOCITI/v_UDSfJaVC0KY_seg_110_120.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a low-angle shot of a man hanging from the cage railing. He is dressed in a red bodysuit and mask, dark tights, and red shoes. In the foreground, on the left side of the frame, the back of a muscular man wearing a tank top and long, flowing curly hair is visible.\nThe camera cuts to the back view of the man in red. The muscular man looks up, speaking to him. The muscular man has red skin, wears a black leather vest and pants, and has black gloves on his hands.\nThe camera cuts to a close-up of the red-clad man's face as he looks down, speaking. His upper clothing has a spider emblem.\nThe camera cuts to the muscular man, who jumps up with excitement.\nThe camera cuts to the man in red, who flips down from the cage railing. In the background, the audience and a displayed American flag outside the cage are visible.\nThe camera cuts as the man in red lands on the ground behind the muscular man. 
After landing, he crouches, with both hands on the ground in an attack-ready stance, while the background shows cheering spectators and flashing lights.\nThe camera cuts to a close-up of the muscular man's upper body as he charges forward with anger.\nThe camera cuts to the man in red, who raises his head and shoots webs from both hands toward the top of the cage. The camera follows the web upwards.\nThe camera focuses on the top steel bar, where the web sticks.\nThe camera cuts as the muscular man charges toward the man in red.", "events": [ { "event": "The video begins with a low-angle shot of a man hanging from the cage railing. He is dressed in a red bodysuit and mask, dark tights, and red shoes. In the foreground, on the left side of the frame, the back of a muscular man wearing a tank top and long, flowing curly hair is visible.", "visual_elements": [ { "content": "The camera captures a low-angle shot of a man hanging from the cage railing.", "type": "camera", "weight": 3 }, { "content": "The man is dressed in a red bodysuit and mask, dark tights, and red shoes.", "type": "attribute", "weight": 3 }, { "content": "In the foreground, on the left side of the frame, the back of a muscular man wearing a tank top is visible.", "type": "scene", "weight": 3 }, { "content": "The muscular man has long, flowing curly hair.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to the back view of the man in red. The muscular man looks up, speaking to him. The muscular man has red skin, wears a black leather vest and pants, and has black gloves on his hands.", "visual_elements": [ { "content": "The camera cuts to the back view of the man in red.", "type": "camera", "weight": 3 }, { "content": "The muscular man looks up, speaking to the man in red.", "type": "action", "weight": 3 }, { "content": "The muscular man has red skin, wears a black leather vest and pants, and has black gloves on his hands.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a close-up of the red-clad man's face as he looks down, speaking. His upper clothing has a spider emblem.", "visual_elements": [ { "content": "The camera cuts to a close-up of the red-clad man's face.", "type": "camera", "weight": 3 }, { "content": "The man in red looks down, speaking.", "type": "action", "weight": 2 }, { "content": "The man’s upper clothing has a spider emblem.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to the muscular man, who jumps up with excitement.", "visual_elements": [ { "content": "The camera cuts to the muscular man.", "type": "camera", "weight": 3 }, { "content": "The muscular man jumps up with excitement.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the man in red, who flips down from the cage railing. In the background, the audience and a displayed American flag outside the cage are visible.", "visual_elements": [ { "content": "The camera cuts to the man in red.", "type": "camera", "weight": 3 }, { "content": "The man in red flips down from the cage railing.", "type": "action", "weight": 3 }, { "content": "In the background, the audience and an American flag outside the cage are visible.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts as the man in red lands on the ground behind the muscular man. 
After landing, he crouches, with both hands on the ground in an attack-ready stance, while the background shows cheering spectators and flashing lights.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The man in red lands on the ground behind the muscular man.", "type": "action", "weight": 3 }, { "content": "After landing, the man in red crouches, with both hands on the ground in an attack-ready stance.", "type": "action", "weight": 3 }, { "content": "In the background, cheering spectators and flashing lights are visible.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a close-up of the muscular man's upper body as he charges forward with anger.", "visual_elements": [ { "content": "The camera cuts to a close-up of the muscular man’s upper body.", "type": "camera", "weight": 3 }, { "content": "The muscular man charges forward with anger.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the man in red, who raises his head and shoots webs from both hands toward the top of the cage. The camera follows the web upwards.", "visual_elements": [ { "content": "The camera cuts to the man in red.", "type": "camera", "weight": 3 }, { "content": "The man in red raises his head and shoots webs from both hands toward the top of the cage.", "type": "action", "weight": 3 }, { "content": "The camera follows the web upwards.", "type": "camera", "weight": 3 } ] }, { "event": "The camera focuses on the top steel bar, where the web sticks.", "visual_elements": [ { "content": "The camera focuses on the top steel bar.", "type": "camera", "weight": 3 }, { "content": "The web sticks to the top steel bar.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts as the muscular man charges toward the man in red.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The muscular man charges toward the man in red.", "type": "action", "weight": 3 } ] } ], "n_events": 10, "n_elements": 28 }, { "index": "TUNA_0372", "video_path": "VELOCITI/v_UFXtRWVYQV8_seg_90_100.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a close-up of two men in military uniforms. The man on the left side of the frame is looking to his left, showing his side profile to the camera. He wears a green military uniform. In the center of the frame is a middle-aged man, also in uniform, with \"MOORE\" printed on his chest. The middle-aged man is speaking to the man opposite him, with a blue curtain in the background.\nThe camera cuts to a side view of the middle-aged man, with the young man’s full face now in the frame. \"BEAN\" is printed on the right chest of the young man’s uniform, and there is a star insignia on his left shoulder. In the background, a colorful painting hangs on the wall. The middle-aged man stands up and steps forward, with the young man following suit.\nThe camera cuts to a view from behind both men as they stop in front of a table. On a platform behind the table stands a cross, with a stained glass window on the wall behind it. Both men bend down, placing their hands on the table, and slowly kneel.", "events": [ { "event": "The video begins with a close-up of two men in military uniforms. The man on the left side of the frame is looking to his left, showing his side profile to the camera. 
He wears a green military uniform. In the center of the frame is a middle-aged man, also in uniform, with \"MOORE\" printed on his chest. The middle-aged man is speaking to the man opposite him, with a blue curtain in the background.", "visual_elements": [ { "content": "The video begins with the camera focused on two men in military uniforms.", "type": "camera", "weight": 3 }, { "content": "The man on the left side of the frame is looking to his left, showing his side profile to the camera.", "type": "attribute", "weight": 3 }, { "content": "The man is wearing a green military uniform.", "type": "attribute", "weight": 2 }, { "content": "In the center of the frame is a middle-aged man also in military uniform.", "type": "attribute", "weight": 3 }, { "content": "\"MOORE\" is printed on the chest of the middle-aged man’s uniform.", "type": "attribute", "weight": 2 }, { "content": "The middle-aged man is speaking to the man opposite him.", "type": "action", "weight": 3 }, { "content": "A blue curtain is visible in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a side view of the middle-aged man, with the young man’s full face now in the frame. \"BEAN\" is printed on the right chest of the young man’s uniform, and there is a star insignia on his left shoulder. In the background, a colorful painting hangs on the wall. The middle-aged man stands up and steps forward, with the young man following suit.", "visual_elements": [ { "content": "The camera cuts to a side view of the middle-aged man.", "type": "camera", "weight": 3 }, { "content": "The young man’s full face appears in the frame opposite the middle-aged man.", "type": "attribute", "weight": 3 }, { "content": "\"BEAN\" is printed on the right chest of the young man’s uniform.", "type": "attribute", "weight": 1 }, { "content": "A star insignia is on the young man’s left shoulder.", "type": "attribute", "weight": 2 }, { "content": "a colorful painting hangs on the wall in the background.", "type": "scene", "weight": 2 }, { "content": "The middle-aged man stands up and steps forward.", "type": "action", "weight": 3 }, { "content": "The young man also stands and follows suit.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to a view from behind both men as they stop in front of a table. On a platform behind the table stands a cross, with a stained glass window on the wall behind it. Both men bend down, placing their hands on the table, and slowly kneel.", "visual_elements": [ { "content": "The camera cuts to a view from behind both men.", "type": "camera", "weight": 3 }, { "content": "The two men stop in front of a table.", "type": "action", "weight": 3 }, { "content": "A cross is placed on a platform behind the table.", "type": "attribute", "weight": 3 }, { "content": "a stained glass window is on the wall behind the platform.", "type": "attribute", "weight": 3 }, { "content": "Both men bend down, placing their hands on the table, and slowly kneel.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0373", "video_path": "VELOCITI/v_UJ_zLBr1NxE_seg_40_50.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on a man and a woman. The man on the left side of the frame is wearing a brown jacket and is bent over, looking out of the airplane window. 
The blonde woman on the right is also leaning to look outside. Suddenly, the man turns right and runs forward, with his back to the camera, and the blonde woman follows suit, turning and running after him.\nThe camera cuts to a white airplane flying through the sky, emitting a trail of black exhaust. The sky is filled with clouds, and the camera moves left, following the plane.\nThe camera cuts to the radar screen and instrument panel of the plane, showing several green radar images and indicator lights displaying error messages.\nThe camera cuts to the cockpit, where two pilots sit with their backs to the camera in their respective seats, wearing flight uniforms and over-ear headphones.\nThe camera cuts to a side view of the pilots, with the co-pilot looking forward and speaking with agitation. The camera moves right as the man in the brown jacket enters, pushing open the cockpit door and rushing toward the pilots. In the background, the blonde woman is also visible at the cockpit door.", "events": [ { "event": "The camera focuses on a man and a woman. The man on the left side of the frame is wearing a brown jacket and is bent over, looking out of the airplane window. The blonde woman on the right is also leaning to look outside. Suddenly, the man turns right and runs forward, with his back to the camera, and the blonde woman follows suit, turning and running after him.", "visual_elements": [ { "content": "The camera focuses on a man and a woman.", "type": "camera", "weight": 3 }, { "content": "The man on the left side of the frame is wearing a brown jacket.", "type": "attribute", "weight": 2 }, { "content": "The man is bent over, looking out of the airplane window.", "type": "action", "weight": 3 }, { "content": "The blonde woman on the right side is also leaning to look outside.", "type": "attribute", "weight": 3 }, { "content": "Suddenly, the man turns right and runs forward, with his back to the camera.", "type": "action", "weight": 3 }, { "content": "The blonde woman then turns right and follows the man.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a white airplane flying through the sky, emitting a trail of black exhaust. 
The sky is filled with clouds, and the camera moves left, following the plane.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A white airplane is flying in the sky.", "type": "action", "weight": 3 }, { "content": "The plane is emitting a trail of black exhaust.", "type": "attribute", "weight": 3 }, { "content": "The sky is densely filled with clouds.", "type": "scene", "weight": 2 }, { "content": "The camera moves left, following the plane.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to the radar screen and instrument panel of the plane, showing several green radar images and indicator lights displaying error messages.", "visual_elements": [ { "content": "The camera cuts to the radar screen and instrument panel of the plane.", "type": "camera", "weight": 3 }, { "content": "Multiple green radar images and indicators are showing error messages.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to the cockpit, where two pilots sit with their backs to the camera in their respective seats, wearing flight uniforms and over-ear headphones.", "visual_elements": [ { "content": "The camera cuts to the cockpit.", "type": "camera", "weight": 3 }, { "content": "Two pilots are seated in their positions, with their backs to the camera.", "type": "attribute", "weight": 3 }, { "content": "The pilots are wearing flight uniforms and over-ear headphones.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a side view of the pilots, with the co-pilot looking forward and speaking with agitation. The camera moves right as the man in the brown jacket enters, pushing open the cockpit door and rushing toward the pilots. In the background, the blonde woman is also visible at the cockpit door.", "visual_elements": [ { "content": "The camera cuts to a side view of the pilots.", "type": "camera", "weight": 3 }, { "content": "The co-pilot looks forward and speaks with agitation.", "type": "action", "weight": 3 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The man in the brown jacket opens the cockpit door and enters.", "type": "action", "weight": 3 }, { "content": "The man rushes toward the pilots.", "type": "action", "weight": 2 }, { "content": "In the background, the blonde woman is also visible at the cockpit door.", "type": "scene", "weight": 2 } ] } ], "n_events": 5, "n_elements": 22 }, { "index": "TUNA_0374", "video_path": "VELOCITI/v_UJ_zLBr1NxE_seg_75_85.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a man lying on the ground. The man turns around in panic and looks forward, with a blonde woman facing away from the camera on his right side.\nThe camera cuts to a wide view of the interior of a capsule, with a black spherical device in the center. Three soldiers in military uniforms lie on the floor near the device. The capsule shakes, lifting the soldiers off the ground, and one soldier floats upward, colliding with the top of the capsule.\nThe camera cuts to a man in a brown jacket and a blonde woman appearing in the frame. Both are affected by the capsule’s shaking, causing them to hit the capsule ceiling. The man grabs the woman’s collar from behind with his left hand, then turns and grabs a fixed part of the ceiling with his right hand. 
The woman spins 180 degrees due to the movement. The man holds the woman as they rotate, attempting to grab the fixed object again, but misses, causing both to continue tumbling to the floor. The camera follows their movement, first moving right, then downwards to the right. The items and equipment in the background of the capsule shake as well.", "events": [ { "event": "The video begins with the camera focused on a man lying on the ground. The man turns around in panic and looks forward, with a blonde woman facing away from the camera on his right side.", "visual_elements": [ { "content": "The video begins with the camera focused on a man lying on the ground.", "type": "camera", "weight": 3 }, { "content": "The man turns around in panic and looks forward.", "type": "action", "weight": 3 }, { "content": "A blonde woman is facing away from the camera on the man's right side.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera cuts to a wide view of the interior of a capsule, with a black spherical device in the center. Three soldiers in military uniforms lie on the floor near the device. The capsule shakes, lifting the soldiers off the ground, and one soldier floats upward, colliding with the top of the capsule.", "visual_elements": [ { "content": "The camera cuts to a wide view of the interior of a capsule.", "type": "camera", "weight": 2 }, { "content": "A black spherical device is positioned in the center of the capsule.", "type": "attribute", "weight": 3 }, { "content": "Three soldiers in military uniforms are lying scattered on the ground near the device.", "type": "attribute", "weight": 3 }, { "content": "The capsule shakes.", "type": "action", "weight": 3 }, { "content": "The three soldiers are jolted off the ground.", "type": "action", "weight": 3 }, { "content": "One soldier floats up and collides with the top of the capsule.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a man in a brown jacket and a blonde woman appearing in the frame. Both are affected by the capsule’s shaking, causing them to hit the capsule ceiling. The man grabs the woman’s collar from behind with his left hand, then turns and grabs a fixed part of the ceiling with his right hand. The woman spins 180 degrees due to the movement. The man holds the woman as they rotate, attempting to grab the fixed object again, but misses, causing both to continue tumbling to the floor. The camera follows their movement, first moving right, then downwards to the right. 
The items and equipment in the background of the capsule shake as well.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A man in a brown jacket and a blonde woman appear in the frame.", "type": "attribute", "weight": 3 }, { "content": "The man and woman are also affected by the shaking and hit the ceiling of the capsule.", "type": "action", "weight": 3 }, { "content": "The man grabs the collar of the woman from behind with his left hand.", "type": "attribute", "weight": 2 }, { "content": "The man turns and grabs a fixed part of the ceiling with his right hand.", "type": "action", "weight": 3 }, { "content": "The woman spins 180 degrees due to the shaking.", "type": "action", "weight": 3 }, { "content": "The man holds the woman as they roll once, trying again to grab a fixed point.", "type": "action", "weight": 3 }, { "content": "The man misses the handle, causing both to continue tumbling to the floor.", "type": "action", "weight": 3 }, { "content": "The camera follows their movement, first moving right, then downwards to the right.", "type": "camera", "weight": 3 }, { "content": "Items and equipment in the background of the capsule shake as well.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0375", "video_path": "VELOCITI/v_UYzF0CAcN3I_seg_80_90.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The scene opens with the camera focusing on a motorcycle, its front decorated with a skull. Two people are riding the bike, both dressed in black and wearing helmets, looking off to their right. The rider has a gas mask on, while the person behind holds a crossbow aimed to their right. In the background, other vehicles race down the highway, kicking up a cloud of dust.\nThe scene cuts to a convertible adorned with spikes. The driver is a muscular man, shirtless, with white hair spiked up. He turns his head to the left and yells, veins standing out on his neck. In the passenger seat is a woman with face paint for battle, and her hair is pinned up with a circle of hairpins.\nThe scene cuts to the highway, where the motorcycle and the convertible are leading the pack, with a few more cars and motorcycles trailing behind.\nThe scene cuts to the side of the convertible. The driver grips the steering wheel tightly, shouting wildly, occasionally glancing to his left. The woman in the passenger seat keeps her eyes fixed on the road ahead.\nThe scene cuts to a black car speeding between the motorcycle and the convertible, then veering to the right, crashing into the motorcycle.\nThe scene cuts, and part of the frame is blocked by some dried yellowish plants. A black car enters from the left side of the frame. The driver is a Black man, and the woman in the passenger seat has short hair, looking to her left.\nThe scene cuts to a wide shot as the black sedan swerves to the left, crashing into the convertible. The camera zooms in on the woman in the passenger seat of the convertible, her expression one of complete shock. 
The camera zooms out again as the driver of the convertible turns his head to look at the black car.\nThe scene cuts as the driver of the convertible stands up from the driver’s seat.\nThe scene cuts to the side of the convertible, where the driver climbs over to the back seat, getting ready to jump out of the car.\nThe scene cuts to the front of the convertible. The woman in the passenger seat has a blank expression, staring ahead, while in the background, the driver is bent over, getting ready to jump out of the car.", "events": [ { "event": "The scene opens with the camera focusing on a motorcycle, its front decorated with a skull. Two people are riding the bike, both dressed in black and wearing helmets, looking off to their right. The rider has a gas mask on, while the person behind holds a crossbow aimed to their right. In the background, other vehicles race down the highway, kicking up a cloud of dust.", "visual_elements": [ { "content": "The scene opens with the camera focusing on a motorcycle.", "type": "camera", "weight": 3 }, { "content": "Its front decorated with a skull.", "type": "attribute", "weight": 2 }, { "content": "Two people are riding the bike, both dressed in black and wearing helmets.", "type": "attribute", "weight": 3 }, { "content": "Looking off to their right.", "type": "attribute", "weight": 2 }, { "content": "The rider has a gas mask on.", "type": "attribute", "weight": 2 }, { "content": "While the person behind holds a crossbow aimed to their right.", "type": "attribute", "weight": 2 }, { "content": "In the background, other vehicles race down the highway, kicking up a cloud of dust.", "type": "scene", "weight": 2 } ] }, { "event": "The scene cuts to a convertible adorned with spikes. The driver is a muscular man, shirtless, with white hair spiked up. He turns his head to the left and yells, veins standing out on his neck. In the passenger seat is a woman with face paint for battle, and her hair is pinned up with a circle of hairpins.", "visual_elements": [ { "content": "The scene cuts to a convertible adorned with spikes.", "type": "camera", "weight": 3 }, { "content": "The driver is a muscular man, shirtless, with white hair spiked up.", "type": "attribute", "weight": 3 }, { "content": "He turns his head to the left and yells, veins standing out on his neck.", "type": "action", "weight": 3 }, { "content": "In the passenger seat is a woman with face paint for battle, and her hair is pinned up with a circle of hairpins.", "type": "attribute", "weight": 3 } ] }, { "event": "The scene cuts to the highway, where the motorcycle and the convertible are leading the pack, with a few more cars and motorcycles trailing behind.", "visual_elements": [ { "content": "The scene cuts to the highway,", "type": "camera", "weight": 3 }, { "content": "Where the motorcycle and the convertible are leading the pack, with a few more cars and motorcycles trailing behind.", "type": "attribute", "weight": 3 } ] }, { "event": "The scene cuts to the side of the convertible. The driver grips the steering wheel tightly, shouting wildly, occasionally glancing to his left. 
The woman in the passenger seat keeps her eyes fixed on the road ahead.", "visual_elements": [ { "content": "The scene cuts to the side of the convertible.", "type": "camera", "weight": 3 }, { "content": "The driver grips the steering wheel tightly, shouting wildly.", "type": "action", "weight": 3 }, { "content": "Occasionally glancing to his left.", "type": "action", "weight": 2 }, { "content": "The woman in the passenger seat keeps her eyes fixed on the road ahead.", "type": "attribute", "weight": 2 } ] }, { "event": "The scene cuts to a black car speeding between the motorcycle and the convertible, then veering to the right, crashing into the motorcycle.", "visual_elements": [ { "content": "The scene cuts.", "type": "camera", "weight": 3 }, { "content": "A black car speeding between the motorcycle and convertible.", "type": "action", "weight": 3 }, { "content": "The black car veering to the right, crashing into the motorcycle.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts, and part of the frame is blocked by some dried yellowish plants. A black car enters from the left side of the frame. The driver is a Black man, and the woman in the passenger seat has short hair, looking to her left.", "visual_elements": [ { "content": "The scene cuts,", "type": "camera", "weight": 3 }, { "content": "Part of the frame is blocked by some dried yellowish plants.", "type": "attribute", "weight": 2 }, { "content": "A black car enters from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The driver is a Black man,", "type": "attribute", "weight": 1 }, { "content": "The woman in the passenger seat has short hair, looking to her left.", "type": "attribute", "weight": 2 } ] }, { "event": "The scene cuts to a wide shot as the black sedan swerves to the left, crashing into the convertible. The camera zooms in on the woman in the passenger seat of the convertible, her expression one of complete shock. The camera zooms out again as the driver of the convertible turns his head to look at the black car.", "visual_elements": [ { "content": "The scene cuts to a wide shot.", "type": "camera", "weight": 3 }, { "content": "The black sedan swerves to the left, crashing into the convertible.", "type": "action", "weight": 3 }, { "content": "The camera zooms.", "type": "camera", "weight": 3 }, { "content": "The woman in the passenger seat of the convertible, her expression one of complete shock.", "type": "attribute", "weight": 1 }, { "content": "The camera zooms out again", "type": "camera", "weight": 3 }, { "content": "The driver of the convertible turns his head to look at the black car.", "type": "action", "weight": 2 } ] }, { "event": "The scene cuts as the driver of the convertible stands up from the driver’s seat.", "visual_elements": [ { "content": "The scene cuts", "type": "camera", "weight": 3 }, { "content": "The driver of the convertible stands up from the driver’s seat.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts to the side of the convertible, where the driver climbs over to the back seat, getting ready to jump out of the car.", "visual_elements": [ { "content": "The scene cuts to the side of the convertible,", "type": "camera", "weight": 3 }, { "content": "the driver climbs over to the back seat, getting ready to jump out of the car.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts to the front of the convertible. 
The woman in the passenger seat has a blank expression, staring ahead, while in the background, the driver is bent over, getting ready to jump out of the car.", "visual_elements": [ { "content": "The scene cuts to the front of the convertible.", "type": "camera", "weight": 3 }, { "content": "The woman in the passenger seat has a blank expression, staring ahead", "type": "attribute", "weight": 2 }, { "content": "In the background, the driver is bent over, getting ready to jump out of the car.", "type": "scene", "weight": 1 } ] } ], "n_events": 10, "n_elements": 38 }, { "index": "TUNA_0376", "video_path": "VELOCITI/v_UYzF0CAcN3I_seg_95_105.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on a man’s profile. Opposite him is someone with braids. The man shakes the person violently while yelling at them.\nThe scene cuts to the outside of a black sedan, its front window clearly marked with a bullet hole. A pair of feet in leather shoes are resting on the driver’s side window, slowly moving inside the car. In the background, a white car can be seen, with someone chasing behind it.\nThe scene cuts to an interior shot. In the passenger seat is a middle-aged man wearing a dark gray T-shirt. With a terrified expression, he pushes the person on his right toward the outside. In the background, a woman in the back seat is helping him.\nThe scene cuts to a woman lying down, wearing a black tank top. She’s struggling with the person above her.\nThe scene cuts to the man above the woman. He’s shirtless, his face twisted in a snarl as he moves closer to her.\nThe scene cuts as the man in the passenger seat leans to his right.\nThe scene cuts to the car’s steering wheel, where an arm reaches in from the left side to grab it.\nThe scene cuts to the outside of the car. A man is hanging from the side of a silver car, covered in bullet holes, waving forward. In the background, a person on a motorcycle can be seen. The camera pans to the right, and the frame is taken up by the headlights of a brightly glowing car.\nThe camera cuts to a woman fighting with someone in the car, woman's neck being strangled by her opponent.\nThe scene cuts to the woman's opponent. The shirtless man, with his white hair spiked up, is tightly choking her. In the background, another woman in the front seat is trying to stop the man.\nThe scene cuts to a close-up of the shirtless man's face.\nThe scene cuts to a close-up of the woman being choked.\nThe scene cuts to a customized convertible, covered in spikes, with a skull hanging from the front. Someone is sitting in the right front seat of the convertible, which is speeding down the road.\nThe scene cuts as the convertible drives into a grassy wasteland. In the background, there’s a horizontal road.", "events": [ { "event": "The camera focuses on a man’s profile. Opposite him is someone with braids. The man shakes the person violently while yelling at them.", "visual_elements": [ { "content": "The camera focuses on a man’s profile.", "type": "camera", "weight": 3 }, { "content": "Opposite him is someone with braids.", "type": "attribute", "weight": 1 }, { "content": "The man shakes the person violently while yelling at them.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts to the outside of a black sedan, its front window clearly marked with a bullet hole. 
A pair of feet in leather shoes are resting on the driver’s side window, slowly moving inside the car. In the background, a white car can be seen, with someone chasing behind it.", "visual_elements": [ { "content": "The scene cuts to the outside of a black sedan,", "type": "camera", "weight": 3 }, { "content": "The car's front window clearly marked with a bullet hole.", "type": "attribute", "weight": 2 }, { "content": "A pair of feet in leather shoes are resting on the driver’s side window", "type": "attribute", "weight": 2 }, { "content": "slowly moving inside the car.", "type": "action", "weight": 3 }, { "content": "In the background, a white car can be seen,", "type": "scene", "weight": 2 }, { "content": "A white car with someone chasing behind it.", "type": "scene", "weight": 2 } ] }, { "event": "The scene cuts to an interior shot. In the passenger seat is a middle-aged man wearing a dark gray T-shirt. With a terrified expression, he pushes the person on his right toward the outside. In the background, a woman in the back seat is helping him.", "visual_elements": [ { "content": "The scene cuts to an interior shot.", "type": "camera", "weight": 3 }, { "content": "In the passenger seat is a middle-aged man wearing a dark gray T-shirt.", "type": "attribute", "weight": 3 }, { "content": "With a terrified expression, he pushes the person on his right toward the outside.", "type": "action", "weight": 3 }, { "content": "In the background, a woman in the back seat is helping him.", "type": "scene", "weight": 1 } ] }, { "event": "The scene cuts to a woman lying down, wearing a black tank top. She’s struggling with the person above her.", "visual_elements": [ { "content": "The scene cuts to a woman.", "type": "camera", "weight": 3 }, { "content": "A woman lying down, wearing a black tank top.", "type": "attribute", "weight": 2 }, { "content": "She’s struggling with the person above her.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts to the man above the woman. He’s shirtless, his face twisted in a snarl as he moves closer to her.", "visual_elements": [ { "content": "The scene cuts to the man above the woman.", "type": "camera", "weight": 3 }, { "content": "He’s shirtless, his face twisted in a snarl as he moves closer to her.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts as the man in the passenger seat leans to his right.", "visual_elements": [ { "content": "The scene cuts.", "type": "camera", "weight": 3 }, { "content": "The man in the passenger seat leans to his right.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts to the car’s steering wheel, where an arm reaches in from the left side to grab it.", "visual_elements": [ { "content": "The scene cuts to the car’s steering wheel,", "type": "camera", "weight": 3 }, { "content": "An arm reaches in from the left side to grab it.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts to the outside of the car. A man is hanging from the side of a silver car, covered in bullet holes, waving forward. In the background, a person on a motorcycle can be seen. 
The camera pans to the right, and the frame is taken up by the headlights of a brightly glowing car.", "visual_elements": [ { "content": "The scene cuts to the outside of the car.", "type": "camera", "weight": 3 }, { "content": "A man is hanging from the side of a silver car, covered in bullet holes, waving forward.", "type": "action", "weight": 3 }, { "content": "In the background, a person on a motorcycle can be seen.", "type": "scene", "weight": 2 }, { "content": "The camera pans to the right,", "type": "camera", "weight": 3 }, { "content": "The frame is taken up by the headlights of a brightly glowing car.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to a woman fighting with someone in the car, woman's neck being strangled by her opponent.", "visual_elements": [ { "content": "The camera cuts to a woman fighting with someone in the car", "type": "camera", "weight": 3 }, { "content": "Woman's neck being strangled by her opponent.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts to the woman's opponent. The shirtless man, with his white hair spiked up, is tightly choking her. In the background, another woman in the front seat is trying to stop the man.", "visual_elements": [ { "content": "The scene cuts to the woman's opponent.", "type": "camera", "weight": 3 }, { "content": "The shirtless man, with his white hair spiked up, is tightly choking her.", "type": "action", "weight": 3 }, { "content": "In the background, another woman in the front seat is trying to stop the man.", "type": "scene", "weight": 2 } ] }, { "event": "The scene cuts to a close-up of the shirtless man's face.", "visual_elements": [ { "content": "The scene cuts to a close-up of the shirtless man's face.", "type": "camera", "weight": 3 } ] }, { "event": "The scene cuts to a close-up of the woman being choked.", "visual_elements": [ { "content": "The scene cuts to a close-up of the woman being choked.", "type": "camera", "weight": 3 } ] }, { "event": "The scene cuts to a customized convertible, covered in spikes, with a skull hanging from the front. Someone is sitting in the right front seat of the convertible, which is speeding down the road.", "visual_elements": [ { "content": "The scene cuts to a customized convertible, covered in spikes, with a skull hanging from the front.", "type": "camera", "weight": 3 }, { "content": "Someone is sitting in the right front seat of the convertible,", "type": "attribute", "weight": 2 }, { "content": "Convertible is speeding down the road.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts as the convertible drives into a grassy wasteland. In the background, there’s a horizontal road.", "visual_elements": [ { "content": "The scene cuts", "type": "camera", "weight": 3 }, { "content": "The convertible drives into a grassy wasteland.", "type": "action", "weight": 3 }, { "content": "In the background, there’s a horizontal road.", "type": "scene", "weight": 2 } ] } ], "n_events": 14, "n_elements": 40 }, { "index": "TUNA_0377", "video_path": "VELOCITI/v_UZb2NOHPA2A_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video starts with the camera focused on a Black man in a black suit. The camera moves to the right as he bends down to defend against a young boy in a blue hoodie and jeans. The boy runs to the left of the man. 
In the background, there’s a basketball court surrounded by a chain-link fence, with some buildings visible in the distance.\nThe scene cuts to the Black man’s back. He leans forward and pats the boy on the back. The boy, holding a basketball, runs to the left side of the frame.\nThe scene cuts to a wide shot of the basketball court. The boy stands to the right of the hoop and successfully makes a shot. He walks under the hoop to catch the basketball as it bounces back up from the ground. Holding the ball, the boy runs to the left side of the frame. The Black man stands off to the side, watching the boy’s moves.\nFinally, the scene cuts back to the Black man. He adjusts the edges of his jacket with both hands, smiling as he looks ahead and speaks.", "events": [ { "event": "The video starts with the camera focused on a Black man in a black suit. The camera moves to the right as he bends down to defend against a young boy in a blue hoodie and jeans. The boy runs to the left of the man. In the background, there’s a basketball court surrounded by a chain-link fence, with some buildings visible in the distance.", "visual_elements": [ { "content": "The video starts with the camera focused on a Black man in a black suit.", "type": "camera", "weight": 3 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The black man bends down to defend against a young boy in a blue hoodie and jeans.", "type": "action", "weight": 3 }, { "content": "The boy runs to the left of the man.", "type": "action", "weight": 3 }, { "content": "In the background, there’s a basketball court surrounded by a chain-link fence", "type": "scene", "weight": 2 }, { "content": "Along with some buildings visible in the distance.", "type": "scene", "weight": 1 } ] }, { "event": "The scene cuts to the Black man’s back. He leans forward and pats the boy on the back. The boy, holding a basketball, runs to the left side of the frame.", "visual_elements": [ { "content": "The scene cuts to the Black man’s back.", "type": "camera", "weight": 3 }, { "content": "The black man leans forward and pats the boy on the back.", "type": "action", "weight": 3 }, { "content": "The boy, holding a basketball, runs to the left side of the frame.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts to a wide shot of the basketball court. The boy stands to the right of the hoop and successfully makes a shot. He walks under the hoop to catch the basketball as it bounces back up from the ground. Holding the ball, the boy runs to the left side of the frame. The Black man stands off to the side, watching the boy’s moves.", "visual_elements": [ { "content": "The scene cuts to a wide shot of the basketball court.", "type": "camera", "weight": 3 }, { "content": "The boy stands to the right of the hoop and successfully makes a shot.", "type": "action", "weight": 3 }, { "content": "The boy walks under the hoop to catch the basketball as it bounces back up from the ground.", "type": "action", "weight": 3 }, { "content": "The boy holding the ball, runs to the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The Black man stands off to the side, watching the boy’s moves.", "type": "attribute", "weight": 3 } ] }, { "event": "Finally, the scene cuts back to the Black man. 
He adjusts the edges of his jacket with both hands, smiling as he looks ahead and speaks.", "visual_elements": [ { "content": "Finally, the scene cuts back to the Black man.", "type": "camera", "weight": 3 }, { "content": "The Black man adjusts the edges of his jacket with both hands.", "type": "action", "weight": 3 }, { "content": "The Black man smiles as he looks ahead and speaks.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 17 }, { "index": "TUNA_0378", "video_path": "VELOCITI/v_UepHO767tO8_seg_115_125.mp4", "video_source": "VELOCITI", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a dimly lit valley, surrounded by snow. Standing in the narrow passage connecting two valleys is a person in a red hiking suit. In the background, on an overhanging cliff in the distance, stands another person. The hiker in the red suit carefully turns around in the narrow passage, leaning forward with arms outstretched to maintain balance. After completing the turn, the hiker looks over his right shoulder, gazing at the person standing on the jutting cliff.\nThe scene then cuts to the other person standing on the jutting cliff. The man is wearing a blue hiking suit and a brown knit hat. He looks down to adjust the climbing rope tied around his waist, then raises his head to look ahead.", "events": [ { "event": "The video begins with the camera focused on a dimly lit valley, surrounded by snow. Standing in the narrow passage connecting two valleys is a person in a red hiking suit. In the background, on an overhanging cliff in the distance, stands another person. The hiker in the red suit carefully turns around in the narrow passage, leaning forward with arms outstretched to maintain balance. After completing the turn, the hiker looks over his right shoulder, gazing at the person standing on the jutting cliff.", "visual_elements": [ { "content": "The video begins with the camera focused on a dimly lit valley, surrounded by snow.", "type": "camera", "weight": 3 }, { "content": "Standing in the narrow passage connecting two valleys is a person in a red hiking suit.", "type": "attribute", "weight": 3 }, { "content": "In the background of the picture, on an overhanging cliff in the distance, stands another person.", "type": "scene", "weight": 1 }, { "content": "The hiker in the red suit carefully turns around in the narrow passage.", "type": "action", "weight": 3 }, { "content": "The hiker in the red suit leans forward with arms outstretched to maintain balance.", "type": "attribute", "weight": 3 }, { "content": "After completing the turn, the hiker in the red suit looks over his right shoulder, gazing at the person standing on the jutting cliff.", "type": "action", "weight": 3 } ] }, { "event": "The scene then cuts to the other person standing on the jutting cliff. The man is wearing a blue hiking suit and a brown knit hat. 
He looks down to adjust the climbing rope tied around his waist, then raises his head to look ahead.", "visual_elements": [ { "content": "The scene then cuts to the other person standing on the jutting cliff.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a blue hiking suit and a brown knit hat.", "type": "attribute", "weight": 2 }, { "content": "The man in the blue hiking suit looks down to adjust the climbing rope tied around his waist.", "type": "action", "weight": 3 }, { "content": "The man in the blue hiking suit then raises his head to look ahead.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0379", "video_path": "VELOCITI/v_UvDzmAFiUj8_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera capturing two towering mountains from a low angle. At the top of the mountain on the left, a person can be seen. In the background, the blue sky and white clouds stretch out.\nThe scene cuts to a hiker on the left, wearing a yellow jacket and carrying a backpack, kneeling with his back to the camera. Another hiker, dressed in dark clothing with climbing gear strapped around his waist, slowly enters from the left side of the frame. The man in dark clothing stumbles over to kneel beside the first hiker, dropping the trekking pole he was holding in his right hand as he lowers himself. In the background, the snow-covered mountain on the opposite side can be seen.", "events": [ { "event": "The video begins with the camera capturing two towering mountains from a low angle. At the top of the mountain on the left, a person can be seen. In the background, the blue sky and white clouds stretch out.", "visual_elements": [ { "content": "The video begins with the camera capturing two towering mountains from a low angle.", "type": "camera", "weight": 3 }, { "content": "At the top of the mountain on the left, a person can be seen.", "type": "scene", "weight": 2 }, { "content": "In the background, the blue sky and white clouds stretch out.", "type": "scene", "weight": 2 } ] }, { "event": "The scene cuts to a hiker on the left, wearing a yellow jacket and carrying a backpack, kneeling with his back to the camera. Another hiker, dressed in dark clothing with climbing gear strapped around his waist, slowly enters from the left side of the frame. The man in dark clothing stumbles over to kneel beside the first hiker, dropping the trekking pole he was holding in his right hand as he lowers himself. 
In the background, the snow-covered mountain on the opposite side can be seen.", "visual_elements": [ { "content": "The scene cuts.", "type": "camera", "weight": 3 }, { "content": "A hiker on the left, wearing a yellow jacket and carrying a backpack, is kneeling with his back to the camera.", "type": "attribute", "weight": 3 }, { "content": "Another hiker, dressed in dark clothing with climbing gear strapped around his waist, slowly enters from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The man in dark clothing stumbles over to kneel beside the first hiker.", "type": "action", "weight": 3 }, { "content": "The hiker drops the trekking pole he was holding in his right hand as he lowers himself.", "type": "action", "weight": 3 }, { "content": "In the background, the snow-covered mountain on the opposite side can be seen.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0380", "video_path": "VELOCITI/v_UyOxOyfX4uM_seg_25_35.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a man wearing a blue sleeveless shirt and a purple headscarf. He has a tattoo near his left shoulder. The man is standing in a large kitchen, with shelves, dishes, and other stuff visible in the background. He turns his body slightly, looking over his right shoulder. The man then turns around and walks to a table behind him, using his right hand to pick up a plastic basin filled with utensils. He walks back, sets the basin down, then looks down and reaches into it with his right hand to grab something. Afterward, he stands up straight and looks ahead.", "events": [ { "event": "The video begins with the camera focused on a man wearing a blue sleeveless shirt and a purple headscarf. He has a tattoo near his left shoulder. The man is standing in a large kitchen, with shelves, dishes, and other stuff visible in the background. He turns his body slightly, looking over his right shoulder. The man then turns around and walks to a table behind him, using his right hand to pick up a plastic basin filled with utensils. He walks back, sets the basin down, then looks down and reaches into it with his right hand to grab something. 
Afterward, he stands up straight and looks ahead.", "visual_elements": [ { "content": "The video begins with the camera focused on a man wearing a blue sleeveless shirt and a purple headscarf.", "type": "camera", "weight": 3 }, { "content": "He has a tattoo near his left shoulder.", "type": "attribute", "weight": 2 }, { "content": "The man is standing in a large kitchen.", "type": "scene", "weight": 2 }, { "content": "Shelves, dishes, and other stuff are visible in the background.", "type": "scene", "weight": 2 }, { "content": "The man turns his body slightly, looking over his right shoulder.", "type": "attribute", "weight": 3 }, { "content": "The man then turns around and walks to a table behind him, using his right hand to pick up a plastic basin filled with utensils.", "type": "action", "weight": 3 }, { "content": "The man walks back, sets the basin down.", "type": "action", "weight": 3 }, { "content": "The man looks down and reaches into the basin with his right hand to grab something.", "type": "action", "weight": 3 }, { "content": "The man stands up straight and looks ahead.", "type": "action", "weight": 2 } ] } ], "n_events": 1, "n_elements": 9 }, { "index": "TUNA_0381", "video_path": "VELOCITI/v_V3EpNkdgmyo_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on an office setting. It pans to the right, revealing a blonde woman in a blue shirt and black pants, walking toward the right side of the frame while brushing her hair back. She’s carrying a dark blue canvas bag over her right shoulder and holding a black folder in her right hand. The woman reaches the office door, holding the folder with both hands as she looks up and greets the person inside.\nThe scene cuts to a woman with black hair, wearing a gray suit jacket. She smiles, leans forward, and extends her right hand to shake hands with the person in front of her. She then looks to her left front and gestures with her left hand. The woman lowers herself into a seat, brushing her hair back with both hands. The camera moves downward to the right. In the foreground, a woman in a blue shirt and black pants enters from the left side of the frame.\nThe scene cuts to behind the black-haired woman. On the computer screen in front of her, some tables are displayed. The blonde woman, sitting across from her, sets her bag down, then raises both hands to brush her hair back.", "events": [ { "event": "The camera focuses on an office setting. It pans to the right, revealing a blonde woman in a blue shirt and black pants, walking toward the right side of the frame while brushing her hair back. She’s carrying a dark blue canvas bag over her right shoulder and holding a black folder in her right hand. 
The woman reaches the office door, holding the folder with both hands as she looks up and greets the person inside.", "visual_elements": [ { "content": "The camera focuses on an office setting.", "type": "camera", "weight": 3 }, { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "The blonde woman in a blue shirt and black pants walks toward the right side of the frame while brushing her hair back.", "type": "action", "weight": 3 }, { "content": "The blonde woman is carrying a dark blue canvas bag over her right shoulder and holding a black folder in her right hand.", "type": "attribute", "weight": 3 }, { "content": "The blonde woman reaches the office door, holding the folder with both hands as she looks up and greets the person inside.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts to a woman with black hair, wearing a gray suit jacket. She smiles, leans forward, and extends her right hand to shake hands with the person in front of her. She then looks to her left front and gestures with her left hand. The woman lowers herself into a seat, brushing her hair back with both hands. The camera moves downward to the right. In the foreground, a woman in a blue shirt and black pants enters from the left side of the frame.", "visual_elements": [ { "content": "The scene cuts to a woman with black hair, wearing a gray suit jacket.", "type": "camera", "weight": 3 }, { "content": "The black-haired woman smiles, leans forward, and extends her right hand to shake hands with the person in front of her.", "type": "action", "weight": 3 }, { "content": "The black-haired woman looks to her left front and gestures with her left hand.", "type": "action", "weight": 3 }, { "content": "The black-haired woman lowers herself into a seat, brushing her hair back with both hands.", "type": "action", "weight": 3 }, { "content": "The camera moves downward to the right.", "type": "camera", "weight": 3 }, { "content": "In the foreground, a woman in a blue shirt and black pants enters from the left side of the frame.", "type": "action", "weight": 3 } ] }, { "event": "The scene cuts to behind the black-haired woman. On the computer screen in front of her, some tables are displayed. The blonde woman, sitting across from her, sets her bag down, then raises both hands to brush her hair back.", "visual_elements": [ { "content": "The scene cuts to behind the black-haired woman.", "type": "camera", "weight": 3 }, { "content": "On the computer screen in front of the black-haired woman, some tables are displayed.", "type": "attribute", "weight": 2 }, { "content": "The blonde woman sits across from the black-haired woman.", "type": "action", "weight": 3 }, { "content": "The blonde woman sets her bag down, then raises both hands to brush her hair back.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0382", "video_path": "VELOCITI/v_V99QCtwXKsI_seg_20_30.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the frame focuses on two characters. The one facing away from the camera is a male wearing a gray hoodie, who is gripping the clothing of the male in front of him who is wearing a red jacket. The male in the red jacket is a head taller than the male in the gray hoodie. The male in the gray hoodie looks up at the male opposite him. 
The male in the red jacket tilts his head to his right and remains silent. The male in the gray hoodie taps the left shoulder of the male in the red jacket with his right hand. The scene is set in a courtyard at night, with a dense bush visible on the right side of the background.\nThe scene cuts to a top-down view. The man in the gray hoodie releases his grip on the man in the red jacket and walks toward the bush behind him. He kneels down behind the bush, curling his body up. The camera moves upward as the man in the red jacket watches the movements of the man in the gray hoodie. He steps to his left, then stops, placing his left hand on the wooden wall to his left, looking down at the man in the gray hoodie behind the bush.", "events": [ { "event": "At the beginning of the video, the frame focuses on two characters. The one facing away from the camera is a male wearing a gray hoodie, who is gripping the clothing of the male in front of him who is wearing a red jacket. The male in the red jacket is a head taller than the male in the gray hoodie. The male in the gray hoodie looks up at the male opposite him. The male in the red jacket tilts his head to his right and remains silent. The male in the gray hoodie taps the left shoulder of the male in the red jacket with his right hand. The scene is set in a courtyard at night, with a dense bush visible on the right side of the background.", "visual_elements": [ { "content": "The scene focuses on two characters.", "type": "camera", "weight": 3 }, { "content": "The man in the gray hoodie has his back to the camera.", "type": "attribute", "weight": 3 }, { "content": "The man in the gray hoodie grabs the shirt of the man in the red jacket.", "type": "attribute", "weight": 3 }, { "content": "The man in the red jacket is a head taller than the man in the gray and blue hoodie.", "type": "attribute", "weight": 2 }, { "content": "The man in the gray hoodie is looking up at the man opposite him.", "type": "attribute", "weight": 3 }, { "content": "The male in the red jacket tilts his head to his right and remains silent.", "type": "attribute", "weight": 3 }, { "content": "The male in the gray hoodie taps the left shoulder of the male in the red jacket with his right hand.", "type": "action", "weight": 3 }, { "content": "The scene is set in a courtyard at night.", "type": "scene", "weight": 2 }, { "content": "There is a dense bush on the right side of the background.", "type": "scene", "weight": 2 } ] }, { "event": "The scene cuts to a top-down view. The man in the gray hoodie releases his grip on the man in the red jacket and walks toward the bush behind him. He kneels down behind the bush, curling his body up. The camera moves upward as the man in the red jacket watches the movements of the man in the gray hoodie. 
He steps to his left, then stops, placing his left hand on the wooden wall to his left, looking down at the man in the gray hoodie behind the bush.", "visual_elements": [ { "content": "The camera cuts to an overhead angle.", "type": "camera", "weight": 3 }, { "content": "The man in the gray hoodie releases the man in the red jacket's clothing and walks towards the bush behind him.", "type": "action", "weight": 3 }, { "content": "The man in the gray hoodie walks behind the bush, first kneeling down and then curling up his body.", "type": "action", "weight": 3 }, { "content": "The camera pans up.", "type": "camera", "weight": 3 }, { "content": "The man in the red jacket watches the movements of the man in the gray hoodie.", "type": "attribute", "weight": 3 }, { "content": "The man in the red jacket walks to his left.", "type": "action", "weight": 3 }, { "content": "The man in the red jacket stops, supporting himself with his left hand on a wooden wall, and looks down at the man in the gray hoodie behind the bush.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0383", "video_path": "VELOCITI/v_VUChuDMVqvY_seg_105_115.mp4", "video_source": "VELOCITI", "visual_characteristic": "Low-Dynamic", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a young woman. She’s wearing a pink long-sleeve shirt, layered with a colorful patterned tank top. Her hair is thick, brown, and short, with curly waves, and she’s wearing a pink hair accessory. To her left is a glass window with rain falling outside, the raindrops leaving streaks on the glass. She’s holding a black wallet in her hands. The camera moves downward to the right as the woman bends over and places the black wallet under a blanket on the right side of the frame. In the bottom right corner, a white and yellow plush toy covered by the blanket is visible. After placing the wallet, she pulls her hands back and stands up straight, her gaze fixed on the plush toy in the bottom right corner.", "events": [ { "event": "The video begins with the camera focused on a young woman. She’s wearing a pink long-sleeve shirt, layered with a colorful patterned tank top. Her hair is thick, brown, and short, with curly waves, and she’s wearing a pink hair accessory. To her left is a glass window with rain falling outside, the raindrops leaving streaks on the glass. She’s holding a black wallet in her hands. The camera moves downward to the right as the woman bends over and places the black wallet under a blanket on the right side of the frame. In the bottom right corner, a white and yellow plush toy covered by the blanket is visible. 
After placing the wallet, she pulls her hands back and stands up straight, her gaze fixed on the plush toy in the bottom right corner.", "visual_elements": [ { "content": "The camera focuses on a young woman.", "type": "camera", "weight": 3 }, { "content": "The woman wears a pink long-sleeved shirt, with a colorfully patterned tank top over it.", "type": "attribute", "weight": 2 }, { "content": "The woman's hair is thick, brown, and short, with curly waves, and she wears a pink hair accessory.", "type": "attribute", "weight": 2 }, { "content": "To the left of the woman is a glass window.", "type": "attribute", "weight": 2 }, { "content": "Outside the window, it's raining, and the raindrops hit the glass, leaving streaks of water.", "type": "scene", "weight": 2 }, { "content": "The girl holds a black wallet in her hand.", "type": "attribute", "weight": 2 }, { "content": "The camera pans downward to the right.", "type": "camera", "weight": 3 }, { "content": "The woman bends down and places the black wallet she's holding under a quilt on the right side of the screen.", "type": "action", "weight": 3 }, { "content": "In the lower right corner of the screen, a white and yellow stuffed toy covered with a quilt can be seen.", "type": "scene", "weight": 2 }, { "content": "After placing the wallet, the woman withdraws her hands and stands up straight.", "type": "action", "weight": 3 }, { "content": "The woman's gaze remains fixed on the stuffed toy in the lower right corner of the screen.", "type": "attribute", "weight": 3 } ] } ], "n_events": 1, "n_elements": 11 }, { "index": "TUNA_0384", "video_path": "VELOCITI/v_VUChuDMVqvY_seg_115_125.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a young woman. She’s wearing a pink long-sleeve shirt, layered with a colorful patterned tank top. Her hair is thick, brown, and short, with curly waves, and she’s wearing a pink hair accessory. To her left is a glass window with rain falling outside, the raindrops leaving streaks on the glass. The camera moves downward to the right as the woman bends sideways and sits down on the window seat to her left. Once seated, she lifts her legs onto the seat, pulling them up one at a time, then wraps her arms around her knees. Her gaze is fixed on the plush toy in the bottom right corner of the frame, which is covered by a yellow and white blanket.\nThe scene cuts to a close-up of the plush toy’s face. It’s a white duck with a yellow beak, resting its head on a pillow and covered by a patchwork quilt. The duck’s eyes are tightly shut, and its beak opens and closes.", "events": [ { "event": "The video begins with the camera focused on a young woman. She’s wearing a pink long-sleeve shirt, layered with a colorful patterned tank top. Her hair is thick, brown, and short, with curly waves, and she’s wearing a pink hair accessory. To her left is a glass window with rain falling outside, the raindrops leaving streaks on the glass. The camera moves downward to the right as the woman bends sideways and sits down on the window seat to her left. Once seated, she lifts her legs onto the seat, pulling them up one at a time, then wraps her arms around her knees. 
Her gaze is fixed on the plush toy in the bottom right corner of the frame, which is covered by a yellow and white blanket.", "visual_elements": [ { "content": "The camera focused on a young woman.", "type": "camera", "weight": 3 }, { "content": "The woman wears a pink long-sleeve shirt, layered with a colorful patterned tank top.", "type": "attribute", "weight": 2 }, { "content": "The woman's hair is thick, brown, and short, with curly waves, and she wears a pink hair accessory.", "type": "attribute", "weight": 2 }, { "content": "To the left of the woman is a glass window.", "type": "attribute", "weight": 2 }, { "content": "Outside the window, it's raining, and the raindrops hit the glass, leaving streaks of water.", "type": "scene", "weight": 2 }, { "content": "The camera pans downward to the right.", "type": "camera", "weight": 3 }, { "content": "The woman bends sideways and sits down on the window seat to her left.", "type": "action", "weight": 3 }, { "content": "Once seated, the woman lifts her legs onto the seat, pulling them up one at a time", "type": "action", "weight": 3 }, { "content": "The woman wraps her arms around her knees.", "type": "action", "weight": 3 }, { "content": "The woman's gaze is fixed on the plush toy in the bottom right corner of the frame", "type": "attribute", "weight": 3 }, { "content": "At the bottom right corner of the screen, there is a yellow and white stuffed toy covered with a blanket.", "type": "scene", "weight": 2 } ] }, { "event": "The scene cuts to a close-up of the plush toy’s face. It’s a white duck with a yellow beak, resting its head on a pillow and covered by a patchwork quilt. The duck’s eyes are tightly shut, and its beak opens and closes.", "visual_elements": [ { "content": "The scene cuts to a close-up of the plush toy’s face.", "type": "camera", "weight": 3 }, { "content": "It’s a white duck with a yellow beak, resting its head on a pillow and covered by a patchwork quilt.", "type": "attribute", "weight": 3 }, { "content": "The duck’s eyes are tightly shut, and its beak opens and closes.", "type": "attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0385", "video_path": "VELOCITI/v_VYQoxBs5N2A_seg_50_60.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the start of the video, the camera focuses on an empty road, where a man dressed in a black uniform walks forward on the left side of the screen, facing away from the camera. The camera follows the man's movements. Along the left side of the road behind him, some low-rise buildings and trees are visible. The weather is clear and sunny.\nThe camera cuts to a closer view of the man as he approaches the buildings and trees on the left side of the road, still shots from his right rear.\nThe camera cuts again to a view of the man from the first scene, now walking closer to the right side of the road. He turns and looks behind him to his left.\nThe camera cuts as the man places his right hand on the holster at his back and bends down to enter a yard through a wooden fence on the left side of the screen. Inside the yard, a green compact car is parked. As he approaches the green car, the man takes out a baton from his waist with his left hand. He bends down near the rear of the car and taps on the rear window with the baton.\nThe camera cuts to a view of the front of the green car, with the driver's door open. 
A person dressed in a blue shirt is slumped over the steering wheel. In the background, the man in the black uniform is positioned at the rear of the car.", "events": [ { "event": "At the start of the video, the camera focuses on an empty road, where a man dressed in a black uniform walks forward on the left side of the screen, facing away from the camera. The camera follows the man's movements. Along the left side of the road behind him, some low-rise buildings and trees are visible. The weather is clear and sunny.", "visual_elements": [ { "content": "The camera focuses on an empty road.", "type": "camera", "weight": 3 }, { "content": "A man dressed in a black uniform walks forward on the left side of the screen, facing away from the camera.", "type": "attribute", "weight": 3 }, { "content": "The man walks forward along the road.", "type": "action", "weight": 3 }, { "content": "The camera follows the man's movements.", "type": "camera", "weight": 3 }, { "content": "Along the left side of the road behind the man, some low-rise buildings and trees are visible.", "type": "scene", "weight": 2 }, { "content": "The weather is clear and sunny.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a closer view of the man as he approaches the buildings and trees on the left side of the road, still shots from his right rear.", "visual_elements": [ { "content": "The camera cuts to a closer view of the man as he approaches the buildings and trees on the left side of the road.", "type": "camera", "weight": 3 }, { "content": "The camera still shots from the man's right rear.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts again to a view of the man from the first scene, now walking closer to the right side of the road. He turns and looks behind him to his left.", "visual_elements": [ { "content": "The camera cuts again to a view of the man from the first scene, now walking closer to the right side of the road.", "type": "camera", "weight": 3 }, { "content": "The man turns and looks behind him to his left.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts as the man places his right hand on the holster at his back and bends down to enter a yard through a wooden fence on the left side of the screen. Inside the yard, a green compact car is parked. As he approaches the green car, the man takes out a baton from his waist with his left hand. He bends down near the rear of the car and taps on the rear window with the baton.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The man places his right hand on the holster at his back and bends down to enter a yard through a wooden fence on the left side of the screen.", "type": "action", "weight": 3 }, { "content": "Inside the yard, a green compact car is parked.", "type": "attribute", "weight": 2 }, { "content": "As he approaches the green car, the man takes out a baton from his waist with his left hand.", "type": "action", "weight": 3 }, { "content": "The man bends down near the rear of the car and taps on the rear window with the baton.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a view of the front of the green car, with the driver's door open. A person dressed in a blue shirt is slumped over the steering wheel. 
In the background, the man in the black uniform is positioned at the rear of the car.", "visual_elements": [ { "content": "The camera cuts to a view of the front of the green car.", "type": "camera", "weight": 3 }, { "content": "The driver's side door is open.", "type": "attribute", "weight": 2 }, { "content": "A person dressed in a blue shirt is slumped over the steering wheel.", "type": "attribute", "weight": 3 }, { "content": "In the background, the man in the black uniform is positioned at the rear of the car.", "type": "scene", "weight": 1 } ] } ], "n_events": 5, "n_elements": 19 }, { "index": "TUNA_0386", "video_path": "VELOCITI/v_VYQoxBs5N2A_seg_65_75.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the inside of a car, with the driver's side door open. A person stands outside the door, while a bald man dressed in a black uniform leans over the steering wheel in the driver's seat. The sleeve of his right arm has the words \"SECURITY OFFICER\" printed on it. The person standing outside uses a black stick to nudge the male leaning over the steering wheel.\nThe camera cuts to an exterior view of the car, where another man dressed in a black uniform approaches a green sedan on the right side of the frame. He walks up to the car door and looks inside, lowering his head. The camera rotates to the left, transitioning from the man's right side to his left.\nThe camera then cuts to the interior of the car.\nThe camera cuts to an exterior view of the car. After glancing inside the car, the man dressed in the black uniform straightens up and looks ahead to his left. He then places the black baton he was holding back at his waist. He turns his head to the right again to look inside the car. The windshield wipers of the green compact car are constantly swaying back and forth.", "events": [ { "event": "At the beginning of the video, the camera focuses on the inside of a car, with the driver's side door open. A person stands outside the door, while a bald man dressed in a black uniform leans over the steering wheel in the driver's seat. The sleeve of his right arm has the words \"SECURITY OFFICER\" printed on it. The person standing outside uses a black stick to nudge the male leaning over the steering wheel.", "visual_elements": [ { "content": "The camera focuses on the inside of a car.", "type": "camera", "weight": 3 }, { "content": "The driver's side door is open.", "type": "attribute", "weight": 2 }, { "content": "A person stands outside the door.", "type": "attribute", "weight": 1 }, { "content": "A bald man dressed in a black uniform leans over the steering wheel in the driver's seat.", "type": "attribute", "weight": 3 }, { "content": "The sleeve of the bald man's right arm has the words \"SECURITY OFFICER\" printed on it.", "type": "attribute", "weight": 2 }, { "content": "The person standing outside uses a black stick to nudge the male leaning over the steering wheel.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts to an exterior view of the car, where another man dressed in a black uniform approaches a green sedan on the right side of the frame. He walks up to the car door and looks inside, lowering his head. 
The camera rotates to the left, transitioning from the man's right side to his left.", "visual_elements": [ { "content": "The camera cuts to an exterior view of the car.", "type": "camera", "weight": 3 }, { "content": "Another man dressed in a black uniform approaches a green sedan on the right side of the frame.", "type": "action", "weight": 3 }, { "content": "The man walks up to the car door and looks inside", "type": "action", "weight": 3 }, { "content": "The camera rotates to the left, transitioning from the man's right side to his left.", "type": "camera", "weight": 3 } ] }, { "event": "The camera then cuts to the interior of the car.", "visual_elements": [ { "content": "The camera cuts to the interior of the car.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to an exterior view of the car. After glancing inside the car, the man dressed in the black uniform straightens up and looks ahead to his left. He then places the black baton he was holding back at his waist. He turns his head to the right again to look inside the car. The windshield wipers of the green compact car are constantly swaying back and forth.", "visual_elements": [ { "content": "The camera cuts to an exterior view of the car.", "type": "camera", "weight": 3 }, { "content": "After glancing inside the car, the man dressed in the black uniform straightens up and looks ahead to his left.", "type": "action", "weight": 3 }, { "content": "The man places the black baton he was holding back at his waist.", "type": "action", "weight": 3 }, { "content": "The man turns his head to the right again to look inside the car.", "type": "action", "weight": 3 }, { "content": "The windshield wipers of the green compact car are constantly swaying back and forth.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 16 }, { "index": "TUNA_0387", "video_path": "VELOCITI/v_VZrBzpfh6hM_seg_80_90.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a male lying on the ground. He wears a light-colored shirt, and there are noticeable bloodstains on his face and head. The camera pans slightly to the right.\nThe camera cuts to a view of a woman's back who wears a blue dress. She rapidly rushes towards a glass door ahead, opens it, and enters the room. Through the glass door, two people can be seen standing inside: a man on the left dressed in a blue suit, and a woman on the right wearing a floral dress. The woman in the floral dress raises her hands, which are slightly trembling, and looks outside with a terrified gaze. In the background, indoor furnishings such as a sofa, table lamp, and coffee table are visible.\nThe camera cuts to a frontal view of a bald man wearing a white short-sleeved shirt. In the foreground, the back of a person wearing a blue top is visible, and the person in the blue top holds a gun pointed at the man in the white shirt. The man in the white shirt takes a few steps forward, while the person in the blue top steps back a few paces. The man in the white shirt turns his head backwards to look inside the room before turning his gaze back to the person with the gun facing him. 
With a smile, the man in the white shirt speaks to the person pointing the gun at him.\nNext, the camera cuts to a frontal view of the person in the blue top, who stares ahead with both hands gripping a rifle pointed forward.\nThe camera cuts back to a man lying on the ground, his face smeared with blood. The man tries to lift his upper body, his lips trembling as he utters something.\nThe camera cuts to a frontal view of the man in the white short-sleeved shirt.", "events": [ { "event": "At the beginning of the video, the camera focuses on a male lying on the ground. He wears a light-colored shirt, and there are noticeable bloodstains on his face and head. The camera pans slightly to the right.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on a male lying on the ground.", "type": "camera", "weight": 3 }, { "content": "The man wears a light-colored shirt.", "type": "attribute", "weight": 2 }, { "content": "There are noticeable bloodstains on the man's face and head.", "type": "attribute", "weight": 3 }, { "content": "The camera pans slightly to the right.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to a view of a woman's back who wears a blue dress. She rapidly rushes towards a glass door ahead, opens it, and enters the room. Through the glass door, two people can be seen standing inside: a man on the left dressed in a blue suit, and a woman on the right wearing a floral dress. The woman in the floral dress raises her hands, which are slightly trembling, and looks outside with a terrified gaze. In the background, indoor furnishings such as a sofa, table lamp, and coffee table are visible.", "visual_elements": [ { "content": "The camera cuts to a view of a woman's back who wears a blue dress.", "type": "camera", "weight": 3 }, { "content": "The woman rapidly rushes towards a glass door ahead.", "type": "action", "weight": 3 }, { "content": "The woman opens it, and enters the room.", "type": "action", "weight": 3 }, { "content": "Through the glass door, two people can be seen standing inside.", "type": "attribute", "weight": 2 }, { "content": "The man on the left is dressed in a blue suit.", "type": "attribute", "weight": 2 }, { "content": "The woman on the right wears a floral dress.", "type": "attribute", "weight": 2 }, { "content": "The woman in the floral dress raises her hands, which are slightly trembling, and looks outside with a terrified gaze.", "type": "action", "weight": 2 }, { "content": "In the background, indoor furnishings such as a sofa, table lamp, and coffee table are visible.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a frontal view of a bald man wearing a white short-sleeved shirt. In the foreground, the back of a person wearing a blue top is visible, and the person in the blue top holds a gun pointed at the man in the white shirt. The man in the white shirt takes a few steps forward, while the person in the blue top steps back a few paces. The man in the white shirt turns his head backwards to look inside the room before turning his gaze back to the person with the gun facing him. 
With a smile, the man in the white shirt speaks to the person pointing the gun at him.", "visual_elements": [ { "content": "The camera cuts to a frontal view of a bald man wearing a white short-sleeved shirt.", "type": "camera", "weight": 3 }, { "content": "In the foreground, the back of a person wearing a blue top is visible.", "type": "attribute", "weight": 2 }, { "content": "The person in the blue top holds a gun pointed at the man in the white shirt.", "type": "attribute", "weight": 3 }, { "content": "The man in the white shirt takes a few steps forward, while the person in the blue top steps back a few paces.", "type": "action", "weight": 3 }, { "content": "The man in the white shirt turns his head backwards to look inside the room before turning his gaze back to the person with the gun facing him.", "type": "action", "weight": 3 }, { "content": "With a smile, the man in the white shirt speaks to the person pointing the gun at him.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera cuts to a frontal view of the person in the blue top, who stares ahead with both hands gripping a rifle pointed forward.", "visual_elements": [ { "content": "The camera cuts to a frontal view of the person in the blue top", "type": "camera", "weight": 3 }, { "content": "The man stares ahead with both hands gripping a rifle pointed forward.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts back to a man lying on the ground, his face smeared with blood. The man tries to lift his upper body, his lips trembling as he utters something.", "visual_elements": [ { "content": "The camera cuts back to a man lying on the ground, his face smeared with blood.", "type": "camera", "weight": 3 }, { "content": "The man tries to lift his upper body, his lips trembling as he utters something.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a frontal view of the man in the white short-sleeved shirt.", "visual_elements": [ { "content": "The camera cuts to a frontal view of the man in the white short-sleeved shirt.", "type": "camera", "weight": 3 } ] } ], "n_events": 6, "n_elements": 23 }, { "index": "TUNA_0388", "video_path": "VELOCITI/v_VtEOGd7tD9o_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a curly-haired male who is sleeping. The male rests his head on his left arm, tilted to one side. He wears a dirty T-shirt. Suddenly, the male opens his eyes, turns his head to the right, and lifts it to look outside. In the background, a checkered bedsheet is visible.\nNext, the camera cuts to an exterior view of a camping tent. The tent is yellow, surrounded by scattered rocks and vegetation. The curly-haired man pulls open the tent flap and steps out, followed by another man wearing a gray shirt and jeans. Soon after, a woman in a red camisole also emerges from the tent. The three of them stand in front of the tent, gazing ahead.\nThe camera cuts to an open field where a woman, covered in blood all over her body, stands with her back to the camera. The woman wears a white tank top and white panties, and she bents over, dealing with the bloodstains on her body. In front of her stands a stone wall. 
In the background, therera are see distant and rolling green mountains against a clear, sunny sky.", "events": [ { "event": "At the beginning of the video, the camera focuses on a curly-haired male who is sleeping. The male rests his head on his left arm, tilted to one side. He wears a dirty T-shirt. Suddenly, the male opens his eyes, turns his head to the right, and lifts it to look outside. In the background, a checkered bedsheet is visible.", "visual_elements": [ { "content": "The camera focuses on a curly-haired male who is sleeping.", "type": "camera", "weight": 3 }, { "content": "The male rests his head on his left arm, tilted to one side.", "type": "attribute", "weight": 2 }, { "content": "The male wears a dirty T-shirt.", "type": "attribute", "weight": 2 }, { "content": "Suddenly, the male opens his eyes, turns his head to the right, and lifts it to look outside.", "type": "action", "weight": 3 }, { "content": "In the background, a checkered bedsheet is visible.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera cuts to an exterior view of a camping tent. The tent is yellow, surrounded by scattered rocks and vegetation. The curly-haired man pulls open the tent flap and steps out, followed by another man wearing a gray shirt and jeans. Soon after, a woman in a red camisole also emerges from the tent. The three of them stand in front of the tent, gazing ahead.", "visual_elements": [ { "content": "The camera cuts to an exterior view of a camping tent.", "type": "camera", "weight": 3 }, { "content": "The tent is yellow, surrounded by scattered rocks and vegetation.", "type": "attribute", "weight": 2 }, { "content": "The curly-haired man pulls open the tent flap and steps out.", "type": "action", "weight": 3 }, { "content": "Another man, dressed in a gray shirt and jeans, subsequently steps out of the tent.", "type": "action", "weight": 3 }, { "content": "Soon after, a woman in a red camisole also emerges from the tent.", "type": "action", "weight": 3 }, { "content": "The three of them stand in front of the tent, gazing ahead.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to an open field where a woman, covered in blood all over her body, stands with her back to the camera. The woman wears a white tank top and white panties, and she bents over, dealing with the bloodstains on her body. In front of her stands a stone wall. 
In the background, therera are see distant and rolling green mountains against a clear, sunny sky.", "visual_elements": [ { "content": "The camera cuts to an open field.", "type": "camera", "weight": 3 }, { "content": "On the open ground stands a woman covered in blood from head to toe.", "type": "attribute", "weight": 3 }, { "content": "The woman wears a white tank top and white panties", "type": "attribute", "weight": 2 }, { "content": "The woman bents over, dealing with the bloodstains on her body.", "type": "action", "weight": 3 }, { "content": "In front of the woman is a stone wall.", "type": "attribute", "weight": 2 }, { "content": "In the background, therera are see distant and rolling green mountains.", "type": "scene", "weight": 2 }, { "content": "The weather is clear and sunny.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0389", "video_path": "VELOCITI/v_VtEOGd7tD9o_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a young man wearing a stained T-shirt, which features a large circular pattern. The man looks down at his chest. In the background, a camping tent and some trees are visible. Standing to the man's right rear is a woman dressed in a red tank top and khaki pants. She has her mouth open as she looks at the man. In the foreground, on the right side of the frame, there is a person wearing a white tank top and white underwear facing away from the camera. This person's arms and legs are covered in blood.\nThe camera cuts to a blonde woman with disheveled hair and bloodstains all over her face.\nSubsequently, the camera cuts back to the man once more. A short dagger is embedded in his chest, and his body gradually leans to his left, collapsing. As the man falls, a person wearing jeans moves around near the fallen man. The camera follows the man's collapse, panning downwards to the right.\nThe camera cuts to the upper torso of a man wearing a gray shirt. The man bends down and squats, and the camera follows his movement downwards.\nThe camera cuts back to the blonde woman who is covered in blood.", "events": [ { "event": "At the beginning of the video, the camera focuses on a young man wearing a stained T-shirt, which features a large circular pattern. The man looks down at his chest. In the background, a camping tent and some trees are visible. Standing to the man's right rear is a woman dressed in a red tank top and khaki pants. She has her mouth open as she looks at the man. In the foreground, on the right side of the frame, there is a person wearing a white tank top and white underwear facing away from the camera. 
This person's arms and legs are covered in blood.", "visual_elements": [ { "content": "The camera focuses on a young man wearing a stained T-shirt.", "type": "camera", "weight": 3 }, { "content": "The man's T-shirt has a large circular pattern on it.", "type": "attribute", "weight": 2 }, { "content": "The man looks down at his chest.", "type": "action", "weight": 3 }, { "content": "In the background, a camping tent and some trees are visible.", "type": "scene", "weight": 2 }, { "content": "Standing to the man's right rear is a woman dressed in a red tank top and khaki pants.", "type": "attribute", "weight": 3 }, { "content": "The woman has her mouth open as she looks at the man.", "type": "attribute", "weight": 2 }, { "content": "In the foreground, on the right side of the frame, there is a person wearing a white tank top and white underwear facing away from the camera.", "type": "attribute", "weight": 3 }, { "content": "This person's arms and legs are covered in blood.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a blonde woman with disheveled hair and bloodstains all over her face.", "visual_elements": [ { "content": "The camera cuts to a blonde woman with disheveled hair and bloodstains all over her face.", "type": "camera", "weight": 3 } ] }, { "event": "Subsequently, the camera cuts back to the man once more. A short dagger is embedded in his chest, and his body gradually leans to his left, collapsing. As the man falls, a person wearing jeans moves around near the fallen man. The camera follows the man's collapse, panning downwards to the right.", "visual_elements": [ { "content": "The camera cuts back to the man once more.", "type": "camera", "weight": 3 }, { "content": "A short dagger is embedded in the man'schest.", "type": "attribute", "weight": 3 }, { "content": "The man's body gradually leans to his left, collapsing.", "type": "action", "weight": 3 }, { "content": "As the man falls, a person wearing jeans moves around near the fallen man.", "type": "scene", "weight": 3 }, { "content": "The camera follows the man's collapse, panning downwards to the right.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to the upper torso of a man wearing a gray shirt. The man bends down and squats, and the camera follows his movement downwards.", "visual_elements": [ { "content": "The camera cuts to the upper torso of a man wearing a gray shirt.", "type": "camera", "weight": 3 }, { "content": "The man bends down.", "type": "action", "weight": 3 }, { "content": "The camera follows his movement downwards.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts back to the blonde woman who is covered in blood.", "visual_elements": [ { "content": "The camera cuts back to the blonde woman who is covered in blood.", "type": "camera", "weight": 3 } ] } ], "n_events": 5, "n_elements": 18 }, { "index": "TUNA_0390", "video_path": "VELOCITI/v_W4_F1oMTEFc_seg_40_50.mp4", "video_source": "VELOCITI", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "In the video, the camera focuses on the side face of a blonde woman. Her hair is wet. She turns her head to the left and raises her right hand, which holds a cigarette. Then she turns her head to the right, looking up towards the top right corner. She turns around as she speaks, and the camera follows her upwards. She wears a yellow off-the-shoulder top. 
After taking a puff of the cigarette in her right hand, she extends it towards the right side of the frame. The background is blurred, revealing a swimming pool.\nThe camera cuts to a male character. The male wears a sleeveless vest and a cross necklace around his neck. He lowers his head to accept a cigarette handed to him by a female. The background is a dark night.", "events": [ { "event": "In the video, the camera focuses on the side face of a blonde woman. Her hair is wet. She turns her head to the left and raises her right hand, which holds a cigarette. Then she turns her head to the right, looking up towards the top right corner. She turns around as she speaks, and the camera follows her upwards. She wears a yellow off-the-shoulder top. After taking a puff of the cigarette in her right hand, she extends it towards the right side of the frame. The background is blurred, revealing a swimming pool.", "visual_elements": [ { "content": "the camera focuses on the side face of a blonde woman.", "type": "camera", "weight": 3 }, { "content": "The woman's hair is wet.", "type": "attribute", "weight": 2 }, { "content": "The woman turns her head to the left and raises her right hand, which holds a cigarette.", "type": "action", "weight": 2 }, { "content": "The woman turns her head to the right, looking up towards the top right corner.", "type": "action", "weight": 3 }, { "content": "The woman turns around as she speaks", "type": "action", "weight": 3 }, { "content": "The camera follows the woman upwards.", "type": "camera", "weight": 3 }, { "content": "The woman wears a yellow off-the-shoulder top.", "type": "attribute", "weight": 2 }, { "content": "After taking a puff of the cigarette in her right hand, the woman extends it towards the right side of the frame.", "type": "action", "weight": 3 }, { "content": "The background is blurred, revealing a swimming pool.", "type": "scene", "weight": 1 } ] }, { "event": "The camera cuts to a male character. The male wears a sleeveless vest and a cross necklace around his neck. He lowers his head to accept a cigarette handed to him by a female. The background is a dark night.", "visual_elements": [ { "content": "The camera cuts to a male character.", "type": "camera", "weight": 3 }, { "content": "The male wears a sleeveless vest and a cross necklace around his neck.", "type": "attribute", "weight": 2 }, { "content": "The man lowers his head to accept a cigarette handed to him by a female.", "type": "action", "weight": 3 }, { "content": "The background is a dark night.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0391", "video_path": "VELOCITI/v_W8AmENQ7_ik_seg_30_40.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on the side face of a man in a dark jacket, with a room filled with green glow in the background. The walls and ceiling of the room are covered with intricate patterns of circuit boards. The man suddenly turns to the right, facing the camera, with a flashlight in his right hand. The camera pans back and the man turns back and then turns around. The flashlight held by the man moves with the man's movements.\nNext, the camera cuts to a black man in a dark uniform. The black man wears uniform with multiple badges and insignias, including the American flag and other logos. 
The black man leans over, looks ahead and speaks.\nThe camera cuts again to reveal a complex mechanical instrument with an entrance leading downward within the frame. The black man stands with his hands on either side of the entrance to the complex mechanical instrument and looks inside. Suddenly, the black man turns his head and looks towards his left back. Then, the black man pulls his left hand back and leans to his left back.", "events": [ { "event": "The video begins with the camera focusing on the side face of a man in a dark jacket, with a room filled with green glow in the background. The walls and ceiling of the room are covered with intricate patterns of circuit boards. The man suddenly turns to the right, facing the camera, with a flashlight in his right hand. The camera pans back and the man turns back and then turns around. The flashlight held by the man moves with the man's movements.", "visual_elements": [ { "content": "The camera focuses on the side face of a man in a dark jacket.", "type": "camera", "weight": 3 }, { "content": "The background features a room filled with green glow.", "type": "scene", "weight": 2 }, { "content": "The walls and ceiling of the room are covered with intricate patterns of circuit boards.", "type": "scene", "weight": 2 }, { "content": "The man suddenly turns to the right, facing the camera.", "type": "action", "weight": 3 }, { "content": "The man holds a lit flashlight in his right hand.", "type": "attribute", "weight": 3 }, { "content": "The camera pans back.", "type": "camera", "weight": 3 }, { "content": "The man turns back and then turns around.", "type": "action", "weight": 3 }, { "content": "The flashlight held by the man moves with the man's movements.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to a black man in a dark uniform. The black man wears uniform with multiple badges and insignias, including the American flag and other logos. The black man leans over, looks ahead and speaks.", "visual_elements": [ { "content": "The camera cuts to a black man in a dark uniform.", "type": "camera", "weight": 3 }, { "content": "The black man wears uniform with multiple badges and insignias, including the American flag and other logos.", "type": "attribute", "weight": 2 }, { "content": "The black man leans over, looks ahead and speaks.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again to reveal a complex mechanical instrument with an entrance leading downward within the frame. The black man stands with his hands on either side of the entrance to the complex mechanical instrument and looks inside. Suddenly, the black man turns his head and looks towards his left back. 
Then, the black man pulls his left hand back and leans to his left back.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "A complex mechanical instrument with an entrance leads downward within the frame.", "type": "attribute", "weight": 2 }, { "content": "The black man stands with his hands on either side of the entrance to the complex mechanical instrument and looks inside.", "type": "attribute", "weight": 3 }, { "content": "The black man turns his head and looks towards his left back.", "type": "action", "weight": 3 }, { "content": "The black man pulls his left hand back and leans to his left back.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0392", "video_path": "VELOCITI/v_WVed9LPelUw_seg_130_140.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on a scene resembling a reading room, with two paintings of naked women on the left of the frame and a bookshelf lined with books on the right. A woman walks from behind the painting to the bookshelf. The woman wears a plaid shirt and jeans. The woman takes down a book from the shelf. The woman opens the book and looks down to read, and the woman looks forward while reading. The camera follows the woman's movement, turning to the right.", "events": [ { "event": "The camera focuses on a scene resembling a reading room, with two paintings of naked women on the left of the frame and a bookshelf lined with books on the right. A woman walks from behind the painting to the bookshelf. The woman wears a plaid shirt and jeans. The woman takes down a book from the shelf. The woman opens the book and looks down to read, and the woman looks forward while reading. The camera follows the woman's movement, turning to the right.", "visual_elements": [ { "content": "The camera focuses on a scene resembling a reading room.", "type": "camera", "weight": 3 }, { "content": "Two paintings of naked women are on the left of the frame.", "type": "attribute", "weight": 2 }, { "content": "A bookshelf is lined with books on the right.", "type": "attribute", "weight": 2 }, { "content": "A woman walks from behind the painting to the bookshelf.", "type": "action", "weight": 3 }, { "content": "The woman wears a plaid shirt and jeans.", "type": "attribute", "weight": 2 }, { "content": "The woman takes down a book from the shelf.", "type": "action", "weight": 3 }, { "content": "The woman opens the book and looks down to read.", "type": "action", "weight": 3 }, { "content": "The woman looks forward while reading.", "type": "action", "weight": 3 }, { "content": "The camera follows the woman's movement, turning to the right.", "type": "camera", "weight": 3 } ] } ], "n_events": 1, "n_elements": 9 }, { "index": "TUNA_0393", "video_path": "VELOCITI/v_WVed9LPelUw_seg_35_45.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a man and a woman. The man on the left is wearing glasses and a black suit. The woman on the right is wearing a plaid shirt. The man is talking and the woman is listening with her arms crossed over her chest. A nude sketch, a white column and a bookshelf can be seen in the background. 
The woman suddenly opens her hands and raises them, walking forward as she speak. The camera follows the woman to the right. The woman comes to a black bookcase, first turning to look behind, and then taking out a book from the bookcase. The woman looks down at the book and turns to her left behind.", "events": [ { "event": "The video begins with the camera focusing on a man and a woman. The man on the left is wearing glasses and a black suit. The woman on the right is wearing a plaid shirt. The man is talking and the woman is listening with her arms crossed over her chest. A nude sketch, a white column and a bookshelf can be seen in the background. The woman suddenly opens her hands and raises them, walking forward as she speak. The camera follows the woman to the right. The woman comes to a black bookcase, first turning to look behind, and then taking out a book from the bookcase. The woman looks down at the book and turns to her left behind.", "visual_elements": [ { "content": "The camera focuses on a man and a woman.", "type": "camera", "weight": 3 }, { "content": "The man on the left is wearing glasses and a black suit.", "type": "attribute", "weight": 3 }, { "content": "The woman on the right is wearing a plaid shirt.", "type": "attribute", "weight": 3 }, { "content": "The man is talking and the woman is listening with her arms crossed over her chest.", "type": "action", "weight": 3 }, { "content": "A nude sketch, a white column and a bookshelf can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "The woman suddenly opens her hands and raises them, walking forward as she speak.", "type": "action", "weight": 3 }, { "content": "The camera follows the woman to the right.", "type": "camera", "weight": 3 }, { "content": "The woman comes to a black bookcase.", "type": "attribute", "weight": 3 }, { "content": "The woman first turns to look behind, and then takes out a book from the bookcase.", "type": "action", "weight": 3 }, { "content": "The woman looks down at the book and turns to her left behind.", "type": "action", "weight": 3 } ] } ], "n_events": 1, "n_elements": 10 }, { "index": "TUNA_0394", "video_path": "VELOCITI/v_WsgkiKu7AO8_seg_5_15.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a man wearing blue striped pajamas, who walks to the left of the frame and then stops at the landing. The camera follows the male to the left. The background is a dim interior environment with a painting hanging on the wall next to the stairs. The man walks up the stairs and looks at the picture hanging on the wall. The man touches the picture with his hand and walks up the stairs.\nThe camera cuts to a woman standing next to the bed in her pajamas. The woman bends down to cover the person on the bed. With the camera zooming in and moving down, the woman leans over to kiss the child in the bed.", "events": [ { "event": "The video begins with the camera focusing on a man wearing blue striped pajamas, who walks to the left of the frame and then stops at the landing. The camera follows the male to the left. The background is a dim interior environment with a painting hanging on the wall next to the stairs. The man walks up the stairs and looks at the picture hanging on the wall. 
The man touches the picture with his hand and walks up the stairs.", "visual_elements": [ { "content": "The camera focuses on a man wearing blue striped pajamas.", "type": "camera", "weight": 3 }, { "content": "The man walks to the left of the frame and then stops at the landing.", "type": "action", "weight": 3 }, { "content": "The camera follows the male to the left.", "type": "camera", "weight": 3 }, { "content": "The background is a dim interior environment.", "type": "scene", "weight": 2 }, { "content": "A painting hangs on the wall next to the stairs.", "type": "attribute", "weight": 2 }, { "content": "The man walks up the stairs and looks at the picture hanging on the wall.", "type": "action", "weight": 3 }, { "content": "The man touches the picture with his hand and walks up the stairs.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a woman standing next to the bed in her pajamas. The woman bends down to cover the person on the bed. With the camera zooming in and moving down, the woman leans over to kiss the child in the bed.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A woman stands next to the bed in her pajamas.", "type": "attribute", "weight": 3 }, { "content": "The woman bends down to cover the person on the bed.", "type": "action", "weight": 3 }, { "content": "The camera zooms in and moves down.", "type": "camera", "weight": 3 }, { "content": "The woman leans over to kiss the child in the bed.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0395", "video_path": "VELOCITI/v_Wy6ANzdhy1s_seg_95_105.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the scene focuses on a narrow train compartment. There are three men inside: two of them are lying on the beds, and the third one is crouching beside the bed. The man on the left bed has his head wrapped in white bandages and is wearing dark clothes, while the other man is lying naked from the waist up. The man crouching beside the bed is wearing a gray suit and holding a small red bottle in his hand. The man in the gray suit is shouting loudly at the two men on the bed. The two men on the bed are struggling. The compartment is adorned with wallpaper featuring blue and orange patterns, and the curtains are blue and white striped.\nThe camera cuts to the corridor outside the private room where a man in a gray suit opens the door and steps out. He first turns to speak to someone inside the room before turning and dashing down the corridor. As he runs, the man in the gray suit keeps glancing back. In the background, people from other private rooms peek out to see the commotion, quickly slamming their doors shut as the man in the gray suit passes by. The walls outside the private rooms are adorned with animal patterns. The corridor carpet is dark blue. The doors to the private rooms are blue, and the walls have a wooden hue. The camera follows the man in the gray suit as he moves forward. He reaches the end of the corridor, stops, and bends down to try opening the door there.\nThe camera cuts, and the man in a gray suit turns around to look ahead. The private room is wrapes in white bandages, and man and shirtless man gradually comes out of the room. 
They runs towards the gray suit man.\nFinally, the camera returns to the front view of the man in a gray suit.", "events": [ { "event": "At the beginning of the video, the scene focuses on a narrow train compartment. There are three men inside: two of them are lying on the beds, and the third one is crouching beside the bed. The man on the left bed has his head wrapped in white bandages and is wearing dark clothes, while the other man is lying naked from the waist up. The man crouching beside the bed is wearing a gray suit and holding a small red bottle in his hand. The man in the gray suit is shouting loudly at the two men on the bed. The two men on the bed are struggling. The compartment is adorned with wallpaper featuring blue and orange patterns, and the curtains are blue and white striped.", "visual_elements": [ { "content": "The screen focuses on a narrow train compartment.", "type": "attribute", "weight": 3 }, { "content": "There are three men inside: two of them are lying on the beds, and the third one is crouching beside the bed.", "type": "attribute", "weight": 3 }, { "content": "The man on the left bed has his head wrapped in white bandages and is wearing dark clothes, while the other man is lying naked from the waist up.", "type": "attribute", "weight": 2 }, { "content": "The man crouching beside the bed is wearing a gray suit and holding a small red bottle in his hand.", "type": "attribute", "weight": 2 }, { "content": "The man in the gray suit is shouting loudly at the two men on the bed.", "type": "action", "weight": 3 }, { "content": "The two men on the bed are struggling.", "type": "action", "weight": 3 }, { "content": "The compartment is adorned with wallpaper featuring blue and orange patterns.", "type": "scene", "weight": 2 }, { "content": "The curtains are blue and white striped.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to the corridor outside the private room where a man in a gray suit opens the door and steps out. He first turns to speak to someone inside the room before turning and dashing down the corridor. As he runs, the man in the gray suit keeps glancing back. In the background, people from other private rooms peek out to see the commotion, quickly slamming their doors shut as the man in the gray suit passes by. The walls outside the private rooms are adorned with animal patterns. The corridor carpet is dark blue. The doors to the private rooms are blue, and the walls have a wooden hue. The camera follows the man in the gray suit as he moves forward. 
He reaches the end of the corridor, stops, and bends down to try opening the door there.", "visual_elements": [ { "content": "The camera cuts to the corridor outside the private room.", "type": "camera", "weight": 3 }, { "content": "A man in a gray suit opens the door and steps out.", "type": "action", "weight": 3 }, { "content": "The man in a gray first turns to speak to someone inside the room before turning and dashing down the corridor.", "type": "action", "weight": 3 }, { "content": "As he runs, the man in the gray suit keeps glancing back.", "type": "action", "weight": 3 }, { "content": "In the background, people from other private rooms peek out to see the commotion.", "type": "scene", "weight": 2 }, { "content": "The people in the other private rooms quickly slamming their doors shut as the man in the gray suit passes by.", "type": "scene", "weight": 2 }, { "content": "The walls outside the private rooms are adorned with animal patterns.", "type": "scene", "weight": 1 }, { "content": "The corridor carpet is dark blue.", "type": "scene", "weight": 2 }, { "content": "The doors to the private rooms are blue, and the walls have a wooden hue.", "type": "scene", "weight": 2 }, { "content": "The camera follows the man in the gray suit as he moves forward.", "type": "camera", "weight": 3 }, { "content": "The man in a gray suit reaches the end of the corridor, stops.", "type": "action", "weight": 3 }, { "content": "The man in a gray suit bends down to try opening the door there.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts, and the man in a gray suit turns around to look ahead. The private room is wrapes in white bandages, and man and shirtless man gradually comes out of the room. They runs towards the gray suit man.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The man in a gray suit turns around to look ahead.", "type": "action", "weight": 2 }, { "content": "The private room is wrapes in white bandages, and man and shirtless man gradually comes out of the room.", "type": "action", "weight": 3 }, { "content": "Two men runs towards the gray suit man.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera returns to the front view of the man in a gray suit.", "visual_elements": [ { "content": "Finally, the camera returns to the front view of the man in a gray suit.", "type": "camera", "weight": 3 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0396", "video_path": "VELOCITI/v_WzBS3IIb-vg_seg_30_40.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a room with two beds and two wall lamps on the bedside tables. A young man dresses in black sits on the edge of the bed on the left side of the screen, bending down to put on his shoes. Another young man, wearing camouflage pants and a black vest, sits on the bed on the right side of the screen. The man in the black vest is putting on an orange hoodie. The beds are covers with pink quilts adorns with blue stripes.\nThe camera cuts to a wider view of the room, revealing that there are two more men standing at the foot of the bed on the left side of the screen. On the left stands a bald man wearing a pink shirt, and on the right stands a man in a brown leather jacket. 
Suddenly, all four men in the room stop what they are doing and turn their heads to look toward the doorway. The man in the brown leather jacket walks toward the door.\nThe camera cuts to a view of the man in the brown leather jacket holding a beige suitcase as he approaches the doorway. He peers through the peephole to look outside.\nThe camera cuts to the view from the peephole, revealing a hand covering it from the outside.", "events": [ { "event": "At the beginning of the video, the camera focuses on a room with two beds and two wall lamps on the bedside tables. A young man dresses in black sits on the edge of the bed on the left side of the screen, bending down to put on his shoes. Another young man, wearing camouflage pants and a black vest, sits on the bed on the right side of the screen. The man in the black vest is putting on an orange hoodie. The beds are covers with pink quilts adorns with blue stripes.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on a room.", "type": "camera", "weight": 3 }, { "content": "There are two beds in the room.", "type": "attribute", "weight": 2 }, { "content": "There are two wall lamps on the bedside tables.", "type": "scene", "weight": 2 }, { "content": "A young man dresses in black sits on the edge of the bed on the left side of the screen", "type": "attribute", "weight": 3 }, { "content": "The young man is bending down to put on his shoes.", "type": "action", "weight": 3 }, { "content": "Another young man, wearing camouflage pants and a black vest, sits on the bed on the right side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The man in the black vest is putting on an orange hoodie.", "type": "action", "weight": 3 }, { "content": "The beds are covers with pink quilts adorns with blue stripes.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a wider view of the room, revealing that there are two more men standing at the foot of the bed on the left side of the screen. On the left stands a bald man wearing a pink shirt, and on the right stands a man in a brown leather jacket. Suddenly, all four men in the room stop what they are doing and turn their heads to look toward the doorway. The man in the brown leather jacket walks toward the door.", "visual_elements": [ { "content": "The camera cuts to a wider view of the room.", "type": "camera", "weight": 3 }, { "content": "There are two more men standing at the foot of the bed on the left side of the screen.", "type": "attribute", "weight": 3 }, { "content": "On the left stands a bald man wearing a pink shirt.", "type": "attribute", "weight": 3 }, { "content": "On the right stands a man in a brown leather jacket.", "type": "attribute", "weight": 3 }, { "content": "Suddenly, all four men in the room stop what they are doing and turn their heads to look toward the doorway.", "type": "action", "weight": 3 }, { "content": "The man in the brown leather jacket walks toward the door.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a view of the man in the brown leather jacket holding a beige suitcase as he approaches the doorway. 
He peers through the peephole to look outside.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The man in the brown leather jacket holding a beige suitcase as he approaches the doorway.", "type": "action", "weight": 3 }, { "content": "The man in the brown leather jacket through the peephole to look outside.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to the view from the peephole, revealing a hand covering it from the outside.", "visual_elements": [ { "content": "The camera cuts to the view from the peephole.", "type": "camera", "weight": 3 }, { "content": "Revealing a hand covering it from the outside.", "type": "attribute", "weight": 3 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0397", "video_path": "VELOCITI/v_WzBS3IIb-vg_seg_80_90.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the hands of a person wearing a brown leather jacket and a floral shirt. The person in the brown leather jacket is taking money out of a wallet held in their hand. To the left of the person in the brown leather jacket is someone wearing a black coat and white underwear, holding a wallet in their hand. The person in the black coat passes the money to the person in the brown leather jacket.\nThe camera then cuts to a view of the door of a room, with an advertisement paper sticks to it from behind. The metal door lock on the door is loose, and a metal piece is turning.\nThe camera pans to the four men inside the room. The man on the far left is wearing a brown leather jacket and is counting cash in his hand with a lowered posture. To the left of the man in the brown leather jacket is a man wearing a black coat and white undergarments. To the left of the man in the black coat is a black man wearing a pink shirt. On the far right is a young man wearing an orange hoodie.\nThe camera cuts once again to the door, revealing that the door lock has been tampered with or destroyed.\nThen, the camera cuts to the face of the man in the orange hoodie. He turns and walks to his right. The camera follows the man in the orange hoodie as he moves down and to the left, handing something in his hand to the man in the brown leather jacket who is crouching on the ground. The man in the brown leather jacket then slips something under the gap at the bottom of the door on his right side.\nThe camera once again zooms in on the face of the man in the orange hoodie. He looks straight ahead with his mouth wide open.\nThe camera cuts to a view of a stack of money being pushed out from under the door gap. The floor is covered with a dark gray carpet.", "events": [ { "event": "At the beginning of the video, the camera focuses on the hands of a person wearing a brown leather jacket and a floral shirt. The person in the brown leather jacket is taking money out of a wallet held in their hand. To the left of the person in the brown leather jacket is someone wearing a black coat and white underwear, holding a wallet in their hand. 
The person in the black coat passes the money to the person in the brown leather jacket.", "visual_elements": [ { "content": "The camera focuses on the hands of a person wearing a brown leather jacket and a floral shirt.", "type": "camera", "weight": 3 }, { "content": "The person in the brown leather jacket is taking money out of a wallet held in their hand.", "type": "action", "weight": 3 }, { "content": "The left of the person in the brown leather jacket is someone wearing a black coat and white underwear, holding a wallet in their hand.", "type": "attribute", "weight": 3 }, { "content": "The person in the black coat passes the money to the person in the brown leather jacket.", "type": "action", "weight": 2 } ] }, { "event": "The camera then cuts to a view of the door of a room, with an advertisement paper sticks to it from behind. The metal door lock on the door is loose, and a metal piece is turning.", "visual_elements": [ { "content": "The camera then cuts to a view of the door of a room.", "type": "camera", "weight": 3 }, { "content": "there is an advertisement paper sticks to the door from behind.", "type": "attribute", "weight": 2 }, { "content": "The metal door lock on the door is loose, and a metal piece is turning.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans to the four men inside the room. The man on the far left is wearing a brown leather jacket and is counting cash in his hand with a lowered posture. To the left of the man in the brown leather jacket is a man wearing a black coat and white undergarments. To the left of the man in the black coat is a black man wearing a pink shirt. On the far right is a young man wearing an orange hoodie.", "visual_elements": [ { "content": "The camera pans to the four men inside the room.", "type": "camera", "weight": 3 }, { "content": "The man on the far left is wearing a brown leather jacket and is counting cash in his hand with a lowered posture.", "type": "attribute", "weight": 3 }, { "content": "To the left of the man in the brown leather jacket is a man wearing a black coat and white undergarments.", "type": "attribute", "weight": 3 }, { "content": "To the left of the man in the black coat is a black man wearing a pink shirt.", "type": "attribute", "weight": 3 }, { "content": "On the far right is a young man wearing an orange hoodie.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts once again to the door, revealing that the door lock has been tampered with or destroyed.", "visual_elements": [ { "content": "The camera cuts once again to the door.", "type": "camera", "weight": 3 }, { "content": "The door lock has been tampered with or destroyed.", "type": "attribute", "weight": 3 } ] }, { "event": "Then, the camera cuts to the face of the man in the orange hoodie. He turns and walks to his right. The camera follows the man in the orange hoodie as he moves down and to the left, handing something in his hand to the man in the brown leather jacket who is crouching on the ground. 
The man in the brown leather jacket then slips something under the gap at the bottom of the door on his right side.", "visual_elements": [ { "content": "Then, the camera cuts to the face of the man in the orange hoodie.", "type": "camera", "weight": 3 }, { "content": "The man in the orange hoodie turns and walks to his right.", "type": "action", "weight": 3 }, { "content": "The camera follows the man in the orange hoodie as he moves down and to the left.", "type": "camera", "weight": 3 }, { "content": "The man in the orange hoodie hands something to the man in the brown leather jacket who is crouching on the ground.", "type": "action", "weight": 3 }, { "content": "The man in the brown leather jacket then slips something under the gap at the bottom of the door on his right side.", "type": "action", "weight": 2 } ] }, { "event": "The camera once again zooms in on the face of the man in the orange hoodie. He looks straight ahead with his mouth wide open.", "visual_elements": [ { "content": "The camera once again zooms in on the face of the man in the orange hoodie.", "type": "camera", "weight": 3 }, { "content": "The man in the orange hoodie looks straight ahead with his mouth wide open.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to a view of a stack of money being pushed out from under the door gap. The floor is covered with a dark gray carpet.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A stack of money is being pushed out from under the door gap.", "type": "action", "weight": 3 }, { "content": "The floor is covered with a dark gray carpet.", "type": "scene", "weight": 2 } ] } ], "n_events": 7, "n_elements": 24 }, { "index": "TUNA_0398", "video_path": "VELOCITI/v_XN7uqQnAxIc_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a male wearing a black leather jacket and a female wearing a black bikini. The background is a dimly lit scene with some holes and graffiti on the walls.\nThe camera cuts to a view of a woman with a black collar around her neck, dancing while holding onto a metal pole.\nThe camera cuts again to a shirtless bald man gripping the railing in front of him, wildly shaking his body. In the background, many other people are waving their arms and shouting.\nThe camera cuts to a male dressed in a black coat and jeans, with his arms spread wide, standing in the center of a stage. The male's coat is open, revealing his bare upper torso. In front of the male is a stand-up microphone. The background features a white wall with black graffiti.\nThe camera cuts to a group of people, some wearing shirts and some shirtless, cheering and dancing.\nThe camera cuts to a view behind the male standing in the center of the stage. A female dressed in a black bikini stands to the left of the male. She wraps her arms around his shoulders and dances close to him. In front of the stage is an open space, and behind this space are many spectators standing.\nThe camera cuts to a side view of the male standing in the center of the stage. 
He has a female on each side of him, and all three of them are engaged in close and passionate dancing.\nThe camera cuts to a view behind the three individuals dancing passionately in the center of the stage.\nThe camera cuts to a frontal view of the male standing in the center of the stage. The female on his left side squats down and exits the frame from the bottom. The male has his mouth wide open, expressing surprise.\nThe camera cuts to several females who are only wearing underwear on their upper torsos while dancing.\nThe camera cuts back to a side view of the stage. Both females on either side of the male stand up, entering the frame from below. The male pushes both females in front of him and presses on their backs, causing them to bend forward.\nThe camera cuts to a view of the male's back, with his arms stretched out and raised high.\nThe camera cuts to a frontal view of the male, who is looking up and roaring loudly. Subsequently, he lowers his raised arms. In front of him are the two females who are still bent over.", "events": [ { "event": "At the beginning of the video, the camera focuses on a male wearing a black leather jacket and a female wearing a black bikini. The background is a dimly lit scene with some holes and graffiti on the walls.", "visual_elements": [ { "content": "The camera focuses on a male wearing a black leather jacket and a female wearing a black bikini.", "type": "camera", "weight": 3 }, { "content": "The background is a dimly lit scene with some holes and graffiti on the walls.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a view of a woman with a black collar around her neck, dancing while holding onto a metal pole.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A woman with a black collar around her neck dances while holding onto a metal pole.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again to a shirtless bald man gripping the railing in front of him, wildly shaking his body. In the background, many other people are waving their arms and shouting.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A shirtless bald man grips the railing in front of him, wildly shaking his body.", "type": "action", "weight": 3 }, { "content": "In the background, many other people are waving their arms and shouting.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a male dressed in a black coat and jeans, with his arms spread wide, standing in the center of a stage. The male's coat is open, revealing his bare upper torso. In front of the male is a stand-up microphone. 
The background features a white wall with black graffiti.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A male dressed in a black coat and jeans, with his arms spread wide, stands in the center of a stage.", "type": "attribute", "weight": 3 }, { "content": "The male's coat is open, revealing his bare upper torso.", "type": "attribute", "weight": 2 }, { "content": "In front of the male is a stand-up microphone.", "type": "attribute", "weight": 2 }, { "content": "The background features a white wall with black graffiti.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a group of people, some wearing shirts and some shirtless, cheering and dancing.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A group of people, some wearing shirts and some shirtless, are cheering and dancing.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a view behind the male standing in the center of the stage. A female dressed in a black bikini stands to the left of the male. She wraps her arms around his shoulders and dances close to him. In front of the stage is an open space, and behind this space are many spectators standing.", "visual_elements": [ { "content": "The camera cuts to a view behind the male standing in the center of the stage.", "type": "camera", "weight": 3 }, { "content": "A female dressed in a black bikini stands to the left of the male.", "type": "attribute", "weight": 3 }, { "content": "The female wraps her arms around his shoulders and dances close to him.", "type": "action", "weight": 3 }, { "content": "In front of the stage is an open space.", "type": "scene", "weight": 2 }, { "content": "Behind the open space, many spectators are standing.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a side view of the male standing in the center of the stage. He has a female on each side of him, and all three of them are engaged in close and passionate dancing.", "visual_elements": [ { "content": "The camera cuts to a side view of the male standing in the center of the stage.", "type": "camera", "weight": 3 }, { "content": "The male has a female on each side of him.", "type": "attribute", "weight": 3 }, { "content": "All three of them are engaged in close and passionate dancing.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a view behind the three individuals dancing passionately in the center of the stage.", "visual_elements": [ { "content": "The camera cuts to a view behind the three individuals dancing passionately in the center of the stage.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts to a frontal view of the male standing in the center of the stage. The female on his left side squats down and exits the frame from the bottom. 
The male has his mouth wide open, expressing surprise.", "visual_elements": [ { "content": "The camera cuts to a frontal view of the male standing in the center of the stage.", "type": "camera", "weight": 3 }, { "content": "The female on the male's left side squats down and exits the frame from the bottom.", "type": "action", "weight": 3 }, { "content": "The male has his mouth wide open, expressing surprise.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to several females who are only wearing underwear on their upper torsos while dancing.", "visual_elements": [ { "content": "The camera cuts to several females who are only wearing underwear on their upper torsos while dancing.", "type": "camera", "weight": 3 } ] }, { "event": "The camera cuts back to a side view of the stage. Both females on either side of the male stand up, entering the frame from below. The male pushes both females in front of him and presses on their backs, causing them to bend forward.", "visual_elements": [ { "content": "The camera cuts back to a side view of the stage.", "type": "camera", "weight": 3 }, { "content": "Both females on either side of the male stand up, entering the frame from below.", "type": "action", "weight": 3 }, { "content": "The male pushes both females in front of him and presses on their backs, causing them to bend forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a view of the male's back, with his arms stretches out and raises high.", "visual_elements": [ { "content": "The camera cuts to a view of the male's back", "type": "camera", "weight": 3 }, { "content": "The male's arms stretches out and raises high.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera cuts to a frontal view of the male, who is looking up and roaring loudly. Subsequently, he lowers his raises arms. In front of him are the two females who are still bent over.", "visual_elements": [ { "content": "The camera cuts to a frontal view of the male.", "type": "camera", "weight": 3 }, { "content": "The male is looking up and roaring loudly.", "type": "action", "weight": 3 }, { "content": "The male lowers his raises arms.", "type": "action", "weight": 2 }, { "content": "In front of the male are the two females who are still bent over.", "type": "attribute", "weight": 2 } ] } ], "n_events": 13, "n_elements": 36 }, { "index": "TUNA_0399", "video_path": "VELOCITI/v_XPUqjed6k4s_seg_0_10.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a ticket booth, where a male dresses in a red coat with a light yellow undershirt is seated. The male holds a desktop microphone in both hands and speaks into it. In the background, a clock and a bus schedule can be seen.\nThe camera cuts to a black police officer dresses in a dark green uniform, wearing sunglasses and a wide-brimmed hat, sitting on a chair in a waiting area and smoking a cigarette. The black male turns his head to the left. In the background, there are lockers and bookshelves, with a \"NO SMOKING\" sign posted on the wall.\nThe camera cuts back to the ticket booth, where the male in the red coat is now closing his eyes, supporting his head with his right hand placed on his forehead. 
The male in the red coat opens his eyes and looks forward, lowering his right hand.\nThe camera cuts once again to the black police officer, who is holding a cigarette in his right hand, turning his head to the right, and leaning forward to speak.\nThe camera cuts to a wider angle, showing a panoramic view of the waiting area. To the right of the black police officer, separated by one seat, sits a male dresses in a white shirt and dark suit pants. The male in the white shirt holds something in his right hand and a cup of drink in his left hand. He looks towards his left front and takes a sip from his drink.", "events": [ { "event": "At the beginning of the video, the camera focuses on a ticket booth, where a male dresses in a red coat with a light yellow undershirt is seated. The male holds a desktop microphone in both hands and speaks into it. In the background, a clock and a bus schedule can be seen.", "visual_elements": [ { "content": "The camera focuses on a ticket booth.", "type": "camera", "weight": 3 }, { "content": "A male dresses in a red coat with a light yellow undershirt is seated.", "type": "attribute", "weight": 3 }, { "content": "The male holds a desktop microphone in both hands.", "type": "attribute", "weight": 2 }, { "content": "The male speaks into microphone.", "type": "action", "weight": 3 }, { "content": "In the background, a clock and a bus schedule can be seen.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a black police officer dresses in a dark green uniform, wearing sunglasses and a wide-brimmed hat, sitting on a chair in a waiting area and smoking a cigarette. The black male turns his head to the left. In the background, there are lockers and bookshelves, with a \"NO SMOKING\" sign posted on the wall.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 2 }, { "content": "A black police officer dresses in a dark green uniform, wearing sunglasses and a wide-brimmed hat, sitting on a chair in a waiting area and smoking a cigarette.", "type": "attribute", "weight": 3 }, { "content": "The black male turns his head to the left.", "type": "action", "weight": 3 }, { "content": "There are lockers and bookshelves, with a \"NO SMOKING\" sign posted on the wall.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts back to the ticket booth, where the male in the red coat is now closing his eyes, supporting his head with his right hand placed on his forehead. 
The male in the red coat opens his eyes and looks forward, lowering his right hand.", "visual_elements": [ { "content": "The camera cuts back to the ticket booth.", "type": "camera", "weight": 2 }, { "content": "The male in the red coat is now closing his eyes, supporting his head with his right hand placed on his forehead.", "type": "attribute", "weight": 3 }, { "content": "The male in the red coat opens his eyes and looks forward, lowering his right hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts once again to the black police officer, who is holding a cigarette in his right hand, turning his head to the right, and leaning forward to speak.", "visual_elements": [ { "content": "The camera cuts once again to the black police officer.", "type": "camera", "weight": 3 }, { "content": "The black police officer is holding a cigarette in his right hand, turning his head to the right, and leaning forward to speak.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a wider angle, showing a panoramic view of the waiting area. To the right of the black police officer, separated by one seat, sits a male dresses in a white shirt and dark suit pants. The male in the white shirt holds something in his right hand and a cup of drink in his left hand. He looks towards his left front and takes a sip from his drink.", "visual_elements": [ { "content": "The camera cuts to a wider angle, showing a panoramic view of the waiting area.", "type": "camera", "weight": 3 }, { "content": "To the right of the black police officer, separated by one seat, sits a male dresses in a white shirt and dark suit pants.", "type": "attribute", "weight": 3 }, { "content": "The male in the white shirt holds something in his right hand and a cup of drink in his left hand.", "type": "attribute", "weight": 2 }, { "content": "The male in the white shirt looks towards his left front and takes a sip from his drink.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 18 }, { "index": "TUNA_0400", "video_path": "VELOCITI/v_XPUqjed6k4s_seg_100_110.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on a male dresses in a white shirt and black pants, crouching behind a counter. He attempts to remove a red cloth draped over a barstool in front of him, but suddenly, the barstool collapses.\nThe camera cuts to a middle-aged male dresses in a light yellow uniform, bent over and standing next to a coffee machine. Suddenly, the middle-aged male turns his head to look back.\nThe camera cuts back to the male in the white shirt, who is trying to untangle the red cloth wrapped around the barstool.\nThe camera cuts once again to the middle-aged male standing next to the coffee machine, holding a cup with both hands and turning his head to look back. Suddenly, he turns his head to look at the cup in front of him, and then he turns back to look behind him again.\nThe camera cuts back to the male in the white shirt, who stands up, picks up the fallen barstool, and holding the red cloth in his hand, turns and walks away.\nThe camera finally cuts back to the middle-aged male, who is still holding the cup in front of him with both hands, twisting his body to look back while shouting something. 
Then, he turns his head to look at the cup in front of him again.", "events": [ { "event": "The camera focuses on a male dresses in a white shirt and black pants, crouching behind a counter. He attempts to remove a red cloth draped over a barstool in front of him, but suddenly, the barstool collapses.", "visual_elements": [ { "content": "The camera focuses on a male dresses in a white shirt and black pants.", "type": "camera", "weight": 3 }, { "content": "The male crouching behind a counter.", "type": "attribute", "weight": 3 }, { "content": "The male attempts to remove a red cloth draped over a barstool in front of him.", "type": "action", "weight": 3 }, { "content": "Suddenly, the barstool collapses.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a middle-aged male dresses in a light yellow uniform, bent over and standing next to a coffee machine. Suddenly, the middle-aged male turns his head to look back.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A middle-aged male dresses in a light yellow uniform, bent over and standing next to a coffee machine.", "type": "attribute", "weight": 3 }, { "content": "Suddenly, the middle-aged male turns his head to look back.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the male in the white shirt, who is trying to untangle the red cloth wrapped around the barstool.", "visual_elements": [ { "content": "The camera cuts back to the male in the white shirt.", "type": "camera", "weight": 3 }, { "content": "The male in the white shirt is trying to untangle the red cloth wrapped around the barstool.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts once again to the middle-aged male standing next to the coffee machine, holding a cup with both hands and turning his head to look back. Suddenly, he turns his head to look at the cup in front of him, and then he turns back to look behind him again.", "visual_elements": [ { "content": "The camera cuts once again to the middle-aged male.", "type": "camera", "weight": 3 }, { "content": "The middle-aged male standing next to the coffee machine, holding a cup with both hands and turning his head to look back.", "type": "attribute", "weight": 3 }, { "content": "Suddenly, the middle-aged male turns his head to look at the cup in front of him.", "type": "action", "weight": 3 }, { "content": "Then the middle-aged male turns back to look behind him again.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts back to the male in the white shirt, who stands up, picks up the fallen barstool, and holding the red cloth in his hand, turns and walks away.", "visual_elements": [ { "content": "The camera cuts back to the male in the white shirt.", "type": "camera", "weight": 2 }, { "content": "The male in the white shirt stands up, picks up the fallen barstool.", "type": "action", "weight": 3 }, { "content": "The male in the white shirt is holding the red cloth in his hand, turns and walks away.", "type": "action", "weight": 2 } ] }, { "event": "The camera finally cuts back to the middle-aged male, who is still holding the cup in front of him with both hands, twisting his body to look back while shouting something. 
Then, he turns his head to look at the cup in front of him again.", "visual_elements": [ { "content": "The camera finally cuts back to the middle-aged male.", "type": "camera", "weight": 3 }, { "content": "The middle-aged male is still holding the cup in front of him with both hands, twisting his body to look back while shouting something.", "type": "action", "weight": 2 }, { "content": "The middle-aged male turns his head to look at the cup in front of him again.", "type": "action", "weight": 3 } ] } ], "n_events": 6, "n_elements": 19 }, { "index": "TUNA_0401", "video_path": "VELOCITI/v_XYwGKDK87DI_seg_5_15.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on two young males dressed in white chef uniforms, sitting behind a workbench in a kitchen. The male on the right is eating with chopsticks in his right hand and a bowl in his left, his head bows down. The male next to him, on his right, is holding chopsticks in his right hand and supporting a bowl with his left, constantly looking forward and talking. In the foreground, there are several cups and two plastic containers filled with ingredients on the table. The background shows a typical commercial kitchen, with towels and cooking utensils hanging on the counter, and shelves lined with bowls, plates, and other kitchen supplies.\nThe camera cuts to a chef dresses in a white uniform, holding a knife in his hand and working on scaling a fish.\nThe camera cuts once again to a young male dressed in a white chef uniform, lowering his head and focusing intently on the task in his hands.", "events": [ { "event": "The camera focuses on two young males dressed in white chef uniforms, sitting behind a workbench in a kitchen. The male on the right is eating with chopsticks in his right hand and a bowl in his left, his head bows down. The male next to him, on his right, is holding chopsticks in his right hand and supporting a bowl with his left, constantly looking forward and talking. In the foreground, there are several cups and two plastic containers filled with ingredients on the table. 
The background shows a typical commercial kitchen, with towels and cooking utensils hanging on the counter, and shelves lined with bowls, plates, and other kitchen supplies.", "visual_elements": [ { "content": "The camera focuses on two young males dressed in white chef uniforms.", "type": "camera", "weight": 3 }, { "content": "Two males sitting behind a workbench in a kitchen.", "type": "attribute", "weight": 3 }, { "content": "The male on the right is eating with chopsticks in his right hand and a bowl in his left, his head bows down.", "type": "action", "weight": 3 }, { "content": "The male next to him, on his right, is holding chopsticks in his right hand and supporting a bowl with his left, constantly looking forward and talking.", "type": "action", "weight": 3 }, { "content": "In the foreground, there are several cups and two plastic containers filled with ingredients on the table.", "type": "scene", "weight": 2 }, { "content": "The background shows a typical commercial kitchen, with towels and cooking utensils hanging on the counter, and shelves lined with bowls, plates, and other kitchen supplies.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a chef dresses in a white uniform, holding a knife in his hand and working on scaling a fish.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "a chef dresses in a white uniform, holding a knife in his hand and working on scaling a fish.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts once again to a young male dressed in a white chef uniform, lowering his head and focusing intently on the task in his hands.", "visual_elements": [ { "content": "The camera cuts once again.", "type": "camera", "weight": 3 }, { "content": "A young male dressed in a white chef uniform, lowering his head and focusing intently on the task in his hands.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 10 }, { "index": "TUNA_0402", "video_path": "VELOCITI/v_Xb6svoM3UWE_seg_60_70.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on the side face of a male, who is wearing a wide-brimmed hat, a dark-colored shirt, and a khaki vest. The background is blurred, with a cluster of green leaves swaying in the wind.\nThe camera cuts to show the male on the left side of the frame, still wearing the wide-brimmed hat, dark-colored shirt, and khaki vest. To his left is a female dressed in a white shirt and also wearing a wide-brimmed hat. The male stands up and walks forward. The camera follows the male as it pans upwards. The male is holding a rifle in his hand.\nThe camera cuts to show a black male wearing a sleeveless vest and a brown hat, carrying a rifle, entering from the left side of the frame and exiting to the right. Following behind the black male are the male in the khaki vest and the female in the white shirt. In the foreground, green branches and leaves are visible. In the background, sturdy tree trunks can be seen.", "events": [ { "event": "The camera focuses on the side face of a male, who is wearing a wide-brimmed hat, a dark-colored shirt, and a khaki vest. 
The background is blurred, with a cluster of green leaves swaying in the wind.", "visual_elements": [ { "content": "The camera focuses on the side face of a male.", "type": "camera", "weight": 3 }, { "content": "The male is wearing a wide-brimmed hat, a dark-colored shirt, and a khaki vest.", "type": "attribute", "weight": 3 }, { "content": "The background is blurred, with a cluster of green leaves swaying in the wind.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to show the male on the left side of the frame, still wearing the wide-brimmed hat, dark-colored shirt, and khaki vest. To his left is a female dressed in a white shirt and also wearing a wide-brimmed hat. The male stands up and walks forward. The camera follows the male as it pans upwards. The male is holding a rifle in his hand.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The male on the left side of the frame, still wearing the wide-brimmed hat, dark-colored shirt, and khaki vest.", "type": "attribute", "weight": 3 }, { "content": "To the male's left is a female dressed in a white shirt and also wearing a wide-brimmed hat.", "type": "attribute", "weight": 3 }, { "content": "The male stands up and walks forward.", "type": "action", "weight": 3 }, { "content": "The camera follows the male as it pans upwards.", "type": "camera", "weight": 3 }, { "content": "The male is holding a rifle in his hand.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to show a black male wearing a sleeveless vest and a brown hat, carrying a rifle, entering from the left side of the frame and exiting to the right. Following behind the black male are the male in the khaki vest and the female in the white shirt. In the foreground, green branches and leaves are visible. In the background, sturdy tree trunks can be seen.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A black male wearing a sleeveless vest and a brown hat, carrying a rifle, entering from the left side of the frame and exiting to the right.", "type": "action", "weight": 3 }, { "content": "Following behind the black male are the male in the khaki vest and the female in the white shirt.", "type": "action", "weight": 3 }, { "content": "In the foreground, green branches and leaves are visible.", "type": "scene", "weight": 2 }, { "content": "In the background, sturdy tree trunks can be seen.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0403", "video_path": "VELOCITI/v_XxWjAkr7ujk_seg_90_100.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the start of the video, the camera focuses on a group of people walking along a wall. On the left side of the frame is a stone wall with some weeds growing on top. The people walking along the wall are dressed in different colored clothes, and each one is holding a gun. Except for a male wearing a blue short-sleeved shirt and a khaki bulletproof vest, the faces of the others are blurred. The camera moves backward as the group walks forward. A man wearing a gray T-shirt and a hat runs off the screen to the right. The man in the bulletproof vest walks forward while waving his hand to dismiss the person holding the camera. 
As the camera pans backward and slightly to the right, a building with a red door can be seen on the right side of the road. The man in the bulletproof vest urges the others to leave the screen to the right.The man in the bulletproof vest raises the rifle he is holding and exits the screen to the right as well. A male wearing a red short-sleeved shirt and a dark-colored bulletproof vest, with his face blurred, approaches the camera and gives a thumbs-up sign.", "events": [ { "event": "At the start of the video, the camera focuses on a group of people walking along a wall. On the left side of the frame is a stone wall with some weeds growing on top. The people walking along the wall are dressed in different colored clothes, and each one is holding a gun. Except for a male wearing a blue short-sleeved shirt and a khaki bulletproof vest, the faces of the others are blurred. The camera moves backward as the group walks forward. A man wearing a gray T-shirt and a hat runs off the screen to the right. The man in the bulletproof vest walks forward while waving his hand to dismiss the person holding the camera. As the camera pans backward and slightly to the right, a building with a red door can be seen on the right side of the road. The man in the bulletproof vest urges the others to leave the screen to the right.The man in the bulletproof vest raises the rifle he is holding and exits the screen to the right as well. A male wearing a red short-sleeved shirt and a dark-colored bulletproof vest, with his face blurred, approaches the camera and gives a thumbs-up sign.", "visual_elements": [ { "content": "The camera focuses on a group of people walking along a wall.", "type": "camera", "weight": 3 }, { "content": "On the left side of the frame is a stone wall.", "type": "attribute", "weight": 3 }, { "content": "Some weeds growing on wall top.", "type": "attribute", "weight": 1 }, { "content": "The people walking along the wall are dressed in different colored clothes.", "type": "attribute", "weight": 2 }, { "content": "Each one is holding a gun.", "type": "attribute", "weight": 2 }, { "content": "Except for a male wearing a blue short-sleeved shirt and a khaki bulletproof vest, the faces of the others are blurred.", "type": "attribute", "weight": 3 }, { "content": "The camera moves backward as the group walks forward.", "type": "camera", "weight": 3 }, { "content": "A man wearing a gray T-shirt and a hat runs off the screen to the right.", "type": "action", "weight": 3 }, { "content": "A man in the bulletproof vest walks forward while waving his hand to dismiss the person holding the camera.", "type": "action", "weight": 2 }, { "content": "As the camera pans backward and slightly to the right", "type": "camera", "weight": 3 }, { "content": "A building with a red door can be seen on the right side of the road.", "type": "scene", "weight": 2 }, { "content": "The man in the bulletproof vest urges the others to leave the screen to the right.", "type": "action", "weight": 3 }, { "content": "The man in the bulletproof vest raises the rifle he is holding and exits the screen to the right as well.", "type": "action", "weight": 3 }, { "content": "A male wearing a red short-sleeved shirt and a dark-colored bulletproof vest, with his face blurred, approaches the camera and gives a thumbs-up sign.", "type": "action", "weight": 3 } ] } ], "n_events": 1, "n_elements": 14 }, { "index": "TUNA_0404", "video_path": "VELOCITI/v_YG-plVmM7O4_seg_90_100.mp4", "video_source": "VELOCITI", "visual_characteristic": 
"High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the face of a male lying on the ground, with the shot shaking violently and the male's face obscure. A stainless steel basin can be seen in the top left corner of the frame.\nThe camera cuts to a corridor in front of a door, where a middle-aged man dressed in a brown sweater and khaki pants runs out of a room on the left side of the frame, following by a woman wearing an orange and dark gray plaid shirt with blue jeans. Both of them run towards the right side of the screen. The camera follows their movement to the right. The camera stops moving as they are exit the frame to the right. In the scene, there are white glass double doors, and a little girl opens the door and walks out of the room. She is dressing in a pink short-sleeved shirt and a floral-patterned dress with suspenders. Behind her is a little boy wearing a black and white striped shirt. Finally, a young woman dressed in a yellow short-sleeved shirt and dark green pants also runs out of the house.\nThe camera cuts and pans to the right, with the middle-aged man and the plaid shirt woman entering from the left side of the frame, running towards a dog cage enclosed with wire mesh. Inside the cage, a large Saint Bernard dog is pressing down on a person. As the man is about to open the cage door, the others also arrive at the entrance of the dog cage.", "events": [ { "event": "At the beginning of the video, the camera focuses on the face of a male lying on the ground, with the shot shaking violently and the male's face obscure. A stainless steel basin can be seen in the top left corner of the frame.", "visual_elements": [ { "content": "The camera focuses on the face of a male lying on the ground.", "type": "camera", "weight": 3 }, { "content": "The camera shot shaking violently.", "type": "camera", "weight": 3 }, { "content": "The male's face is obscure.", "type": "attribute", "weight": 2 }, { "content": "A stainless steel basin can be seen in the top left corner of the frame.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a corridor in front of a door, where a middle-aged man dressed in a brown sweater and khaki pants runs out of a room on the left side of the frame, following by a woman wearing an orange and dark gray plaid shirt with blue jeans. Both of them run towards the right side of the screen. The camera follows their movement to the right. The camera stops moving as they are exit the frame to the right. In the scene, there are white glass double doors, and a little girl opens the door and walks out of the room. She is dressing in a pink short-sleeved shirt and a floral-patterned dress with suspenders. Behind her is a little boy wearing a black and white striped shirt. 
Finally, a young woman dressed in a yellow short-sleeved shirt and dark green pants also runs out of the house.", "visual_elements": [ { "content": "The camera cuts to a corridor in front of a door.", "type": "camera", "weight": 3 }, { "content": "A middle-aged man dressed in a brown sweater and khaki pants runs out of a room on the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The middle-aged man is followed by a woman wearing an orange and dark gray plaid shirt with blue jeans.", "type": "action", "weight": 3 }, { "content": "Both of them run towards the right side of the screen.", "type": "action", "weight": 2 }, { "content": "The camera follows their movement to the right.", "type": "camera", "weight": 3 }, { "content": "The camera stops moving.", "type": "camera", "weight": 3 }, { "content": "Both of them exit the frame to the right.", "type": "action", "weight": 2 }, { "content": "In the scene, there are white glass double doors.", "type": "attribute", "weight": 3 }, { "content": "A little girl opens the door and walks out of the room.", "type": "action", "weight": 3 }, { "content": "The girl is dressed in a pink short-sleeved shirt and a floral-patterned dress with suspenders.", "type": "action", "weight": 3 }, { "content": "Behind the girl is a little boy wearing a black and white striped shirt.", "type": "action", "weight": 3 }, { "content": "A young woman dressed in a yellow short-sleeved shirt and dark green pants also runs out of the house.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts and pans to the right, with the middle-aged man and the plaid shirt woman entering from the left side of the frame, running towards a dog cage enclosed with wire mesh. Inside the cage, a large Saint Bernard dog is pressing down on a person. As the man is about to open the cage door, the others also arrive at the entrance of the dog cage.", "visual_elements": [ { "content": "The camera cuts and pans to the right.", "type": "camera", "weight": 3 }, { "content": "The middle-aged man and the plaid shirt woman enter from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "They are running towards a dog cage enclosed with wire mesh.", "type": "action", "weight": 3 }, { "content": "Inside the cage, a large Saint Bernard dog is pressing down on a person.", "type": "attribute", "weight": 2 }, { "content": "As the man is about to open the cage door, the others also arrive at the entrance of the dog cage.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0405", "video_path": "VELOCITI/v_YXt8RmeU_AA_seg_55_65.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "In the video, a light brown Ford sedan moves forward and approaches the camera, then stops on the left lane of the highway with its headlights on. Both the driver's seat and the front passenger seat are occupied. After stopping, the female passenger opens the door and gets out. She is wearing a light blue plaid shirt and dark cropped pants. After closing the door, she adjusts her backpack that's hanging on her shoulder. The female turns her head and gives the middle finger to the male driver. Then, she walks to the front of the car, turns her back to the camera, raises her right arm, and sticks out her thumb to hitchhike. The male driver also opens his door and gets out. 
He places his right hand on the car roof and looks down at his phone in his left hand. In the background, there is lush green vegetation on both sides of the road, and other vehicles are driving on the road.", "events": [ { "event": "In the video, a light brown Ford sedan moves forward and approaches the camera, then stops on the left lane of the highway with its headlights on. Both the driver's seat and the front passenger seat are occupied. After stopping, the female passenger opens the door and gets out. She is wearing a light blue plaid shirt and dark cropped pants. After closing the door, she adjusts her backpack that's hanging on her shoulder. The female turns her head and gives the middle finger to the male driver. Then, she walks to the front of the car, turns her back to the camera, raises her right arm, and sticks out her thumb to hitchhike. The male driver also opens his door and gets out. He places his right hand on the car roof and looks down at his phone in his left hand. In the background, there is lush green vegetation on both sides of the road, and other vehicles are driving on the road.", "visual_elements": [ { "content": "A light brown Ford sedan moves forward and approaches the camera.", "type": "action", "weight": 3 }, { "content": "The sedan stops on the left lane of the highway.", "type": "attribute", "weight": 3 }, { "content": "The sedan's headlights are on.", "type": "attribute", "weight": 2 }, { "content": "Both the driver's seat and the front passenger seat are occupied.", "type": "attribute", "weight": 2 }, { "content": "After stopping, the female passenger opens the door and gets out.", "type": "action", "weight": 3 }, { "content": "The female is wearing a light blue plaid shirt and dark cropped pants.", "type": "attribute", "weight": 2 }, { "content": "After closing the door, the female adjusts her backpack that's hanging on her shoulder.", "type": "action", "weight": 3 }, { "content": "The female turns her head and gives the middle finger to the male driver.", "type": "action", "weight": 2 }, { "content": "The female walks to the front of the car, turns her back to the camera, raises her right arm, and sticks out her thumb to hitchhike.", "type": "action", "weight": 3 }, { "content": "The male driver also opens his door and gets out.", "type": "action", "weight": 3 }, { "content": "The male places his right hand on the car roof and looks down at his phone in his left hand.", "type": "attribute", "weight": 3 }, { "content": "In the background, there is lush green vegetation on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "Other vehicles are driving on the road.", "type": "scene", "weight": 2 } ] } ], "n_events": 1, "n_elements": 13 }, { "index": "TUNA_0406", "video_path": "VELOCITI/v_YXt8RmeU_AA_seg_70_80.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on a middle-aged male standing beside the car, dressed in a dark-colored shirt. He holds the car keys in his right hand and leans his right arm on the car roof. The male looks up towards the front and then lowers his head to look at his phone in his left hand.\nThe camera cuts to a focus on an overweight woman wearing a blue and white plaid shirt and carrying a gray backpack. After saying something angrily to someone across from her, she turns and runs towards the front. 
In the background, a pedestrian bridge can be seen. Vehicles are driving on the highway, and there is lush green vegetation on the left side of the road.\nThe camera cuts again, showing the overweight woman running towards the right side of the frame. Soon after, the male who was standing beside the car also starts running to the right, chasing after the woman. There is a patch of green grass in the foreground. There are pedestrian bridges on both the left and right sides of the frame.\nThe camera cuts to a view of the overweight woman running ahead with great effort. Behind her, the male is chasing her while talking on the phone. A black pickup truck passes by on the road next to them.", "events": [ { "event": "The camera focuses on a middle-aged male standing beside the car, dressed in a dark-colored shirt. He holds the car keys in his right hand and leans his right arm on the car roof. The male looks up towards the front and then lowers his head to look at his phone in his left hand.", "visual_elements": [ { "content": "The camera focuses on a middle-aged male standing beside the car, dressed in a dark-colored shirt.", "type": "camera", "weight": 3 }, { "content": "The male holds the car keys in his right hand and leans his right arm on the car roof.", "type": "attribute", "weight": 3 }, { "content": "The male looks up towards the front and then lowers his head to look at his phone in his left hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a focus on an overweight woman wearing a blue and white plaid shirt and carrying a gray backpack. After saying something angrily to someone across from her, she turns and runs towards the front. In the background, a pedestrian bridge can be seen. Vehicles are driving on the highway, and there is lush green vegetation on the left side of the road.", "visual_elements": [ { "content": "The camera cuts to a focus on an overweight woman.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a blue and white plaid shirt and carrying a gray backpack.", "type": "attribute", "weight": 2 }, { "content": "After saying something angrily to someone across from her, the woman turns and runs towards the front.", "type": "action", "weight": 3 }, { "content": "In the background, a pedestrian bridge can be seen.", "type": "scene", "weight": 2 }, { "content": "Vehicles are driving on the highway.", "type": "scene", "weight": 2 }, { "content": "There is lush green vegetation on the left side of the road.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts again, showing the overweight woman running towards the right side of the frame. Soon after, the male who was standing beside the car also starts running to the right, chasing after the woman. There is a patch of green grass in the foreground. 
There are pedestrian bridges on both the left and right sides of the frame.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The overweight woman running towards the right side of the frame.", "type": "action", "weight": 3 }, { "content": "The male who was standing beside the car also starts running to the right, chasing after the woman.", "type": "action", "weight": 3 }, { "content": "There is a patch of green grass in the foreground.", "type": "scene", "weight": 2 }, { "content": "There are pedestrian bridges on both the left and right sides of the frame.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a view of the overweight woman running ahead with great effort. Behind her, the male is chasing her while talking on the phone. A black pickup truck passes by on the road next to them.", "visual_elements": [ { "content": "The camera cuts to a view of the overweight woman.", "type": "camera", "weight": 3 }, { "content": "The overweight woman running ahead with great effort.", "type": "action", "weight": 3 }, { "content": "Behind the overweight woman, the male is chasing her while talking on the phone.", "type": "action", "weight": 3 }, { "content": "A black pickup truck passes by on the road next to them.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0407", "video_path": "VELOCITI/v_YlF1gLpUZp8_seg_5_15.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a beach where the waves gently lap against the sand, and dense woods are visible in the distance.\nThen, the camera cuts to an old boat parked on the beach, with a blue and white hull. A young boy dressed in a white and navy blue short-sleeved shirt stands at the stern, inspecting the boat's engine. A man with his upper torso exposed stands up from the cabin and enters the frame.\nThe camera cuts to a close-up of the boy's face at the stern, and he suddenly looks up towards the front.\nThe camera cuts, and a young boy wearing a dark hoodie and dark jeans walks towards the camera from a distance. In the background, the land is mostly barren, with patches of dead grass or no grass at all. However, there are also areas with green meadows, and green forests can be seen in the distance.\nThen, the camera cuts back to the boat, where the shirtless man, now wearing jeans, steps out of the cabin and prepares to jump off the boat.", "events": [ { "event": "At the beginning of the video, the camera focuses on a beach where the waves gently lap against the sand, and dense woods are visible in the distance.", "visual_elements": [ { "content": "The camera focuses on a beach.", "type": "camera", "weight": 3 }, { "content": "The waves gently lap against the sand", "type": "attribute", "weight": 3 }, { "content": "And dense woods are visible in the distance.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera cuts to an old boat parked on the beach, with a blue and white hull. A young boy dressed in a white and navy blue short-sleeved shirt stands at the stern, inspecting the boat's engine. 
A man with his upper torso exposed stands up from the cabin and enters the frame.", "visual_elements": [ { "content": "The camera cuts to an old boat parked on the beach.", "type": "camera", "weight": 2 }, { "content": "The boat's color is a blue and white hull.", "type": "attribute", "weight": 2 }, { "content": "A young boy dressed in a white and navy blue short-sleeved shirt stands at the stern.", "type": "attribute", "weight": 2 }, { "content": "The young boy stands at the stern, inspecting the boat's engine.", "type": "action", "weight": 3 }, { "content": "A man with his upper torso exposed stands up from the cabin and enters the frame.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up of the boy's face at the stern, and he suddenly looks up towards the front.", "visual_elements": [ { "content": "The camera cuts to a close-up of the boy's face at the stern.", "type": "camera", "weight": 3 }, { "content": "The young boy suddenly looks up towards the front.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts, and a young boy wearing a dark hoodie and dark jeans walks towards the camera from a distance. In the background, the land is mostly barren, with patches of dead grass or no grass at all. However, there are also areas with green meadows, and green forests can be seen in the distance.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A young boy wearing a dark hoodie and dark jeans walks towards the camera from a distance.", "type": "action", "weight": 3 }, { "content": "In the background, the land is mostly barren, with patches of dead grass or no grass at all.", "type": "scene", "weight": 2 }, { "content": "there are also areas with green meadows, and green forests can be seen in the distance.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera cuts back to the boat, where the shirtless man, now wearing jeans, steps out of the cabin and prepares to jump off the boat.", "visual_elements": [ { "content": "The camera cuts back to the boat.", "type": "camera", "weight": 3 }, { "content": "The shirtless man is wearing jeans.", "type": "attribute", "weight": 2 }, { "content": "The man steps out of the cabin and prepares to jump off the boat.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 17 }, { "index": "TUNA_0408", "video_path": "VELOCITI/v_YlF1gLpUZp8_seg_65_75.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on a young boy standing on a barren piece of land, with some green grass and trees in the background. He wears a dark hoodie with a gray T-shirt underneath, and tears are streaming down his face as he speaks to someone opposite him. Subsequently, the boy turns around and walks away from the camera, his back facing us. His jeans are stained with dirt and mud.\nThe camera cuts to a shirtless adult male standing beside a blue and white boat. He looks ahead in silence. In the foreground, a boy with a buzz cut enters from the right side of the frame and exits from the left side.\nThe camera cuts again, showing the boy in the dark hoodie continuing to walk forward on the barren land, his back facing the camera. 
A buzz-cut boy wearing a white and navy blue short-sleeved shirt runs with his back to the camera, chasing the boy in the dark hoodie ahead of him.", "events": [ { "event": "The camera focuses on a young boy standing on a barren piece of land, with some green grass and trees in the background. He wears a dark hoodie with a gray T-shirt underneath, and tears are streaming down his face as he speaks to someone opposite him. Subsequently, the boy turns around and walks away from the camera, his back facing us. His jeans are stained with dirt and mud.", "visual_elements": [ { "content": "The camera focuses on a young boy standing on a barren piece of land.", "type": "camera", "weight": 3 }, { "content": "Some green grass and trees are visible in the background.", "type": "scene", "weight": 2 }, { "content": "The boy wears a dark hoodie with a gray T-shirt underneath.", "type": "attribute", "weight": 3 }, { "content": "Tears are streaming down the boy's face as he speaks to someone opposite him.", "type": "action", "weight": 3 }, { "content": "The boy turns around and walks away from the camera, his back facing us.", "type": "action", "weight": 3 }, { "content": "The boy's jeans are stained with dirt and mud.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to a shirtless adult male standing beside a blue and white boat. He looks ahead in silence. In the foreground, a boy with a buzz cut enters from the right side of the frame and exits from the left side.", "visual_elements": [ { "content": "The camera cuts to a shirtless adult male.", "type": "camera", "weight": 3 }, { "content": "The male is standing beside a blue and white boat.", "type": "attribute", "weight": 2 }, { "content": "The male looks ahead in silence.", "type": "attribute", "weight": 3 }, { "content": "In the foreground, a boy with a buzz cut enters from the right side of the frame and exits from the left side.", "type": "action", "weight": 2 } ] }, { "event": "The camera cuts again, showing the boy in the dark hoodie continuing to walk forward on the barren land, his back facing the camera. A buzz-cut boy wearing a white and navy blue short-sleeved shirt runs with his back to the camera, chasing the boy in the dark hoodie ahead of him.", "visual_elements": [ { "content": "The camera cuts again.", "type": "camera", "weight": 3 }, { "content": "The boy in the dark hoodie continues to walk forward on the barren land, his back facing the camera.", "type": "action", "weight": 3 }, { "content": "A buzz-cut boy wearing a white and navy blue short-sleeved shirt runs with his back to the camera, chasing the boy in the dark hoodie ahead of him.", "type": "action", "weight": 3 }, { "content": "The buzz-cut boy is chasing the boy in the dark hoodie ahead of him.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0409", "video_path": "VELOCITI/v_Z41Lvaw_W6E_seg_65_75.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on a man wearing a black short-sleeved shirt, with his back facing the camera. He stands on a balcony, overlooking a cluster of red-brick buildings in the distance, where green plants and flowers adorn the windows and balconies. The man bends down, elbows resting on the railing, and lights a cigarette in his mouth. After taking a puff, he holds the cigarette in his left hand. 
He first looks down and then turns his head to the left. A woman with a bun hairstyle and wearing a black short-sleeved shirt enters from the left side of the frame and comes to stand beside the man. They start conversing, and the man turns to face the camera.", "events": [ { "event": "The camera focuses on a man wearing a black short-sleeved shirt, with his back facing the camera. He stands on a balcony, overlooking a cluster of red-brick buildings in the distance, where green plants and flowers adorn the windows and balconies. The man bends down, elbows resting on the railing, and lights a cigarette in his mouth. After taking a puff, he holds the cigarette in his left hand. He first looks down and then turns his head to the left. A woman with a bun hairstyle and wearing a black short-sleeved shirt enters from the left side of the frame and comes to stand beside the man. They start conversing, and the man turns to face the camera.", "visual_elements": [ { "content": "The camera focuses on a man wearing a black short-sleeved shirt, with his back facing the camera.", "type": "camera", "weight": 3 }, { "content": "A man stands on a balcony", "type": "attribute", "weight": 2 }, { "content": "The man is overlooking a cluster of red-brick buildings in the distance.", "type": "scene", "weight": 2 }, { "content": "The green plants and flowers adorn the windows and balconies.", "type": "scene", "weight": 2 }, { "content": "The man bends down, elbows resting on the railing, and lights a cigarette in his mouth.", "type": "action", "weight": 3 }, { "content": "After the man taking a puff, he holds the cigarette in his left hand.", "type": "action", "weight": 2 }, { "content": "The man first looks down and then turns his head to the left.", "type": "action", "weight": 3 }, { "content": "A woman with a bun hairstyle and wearing a black short-sleeved shirt enters from the left side of the frame and comes to stand beside the man.", "type": "action", "weight": 3 }, { "content": "Both of them start conversing.", "type": "action", "weight": 3 }, { "content": "The man turns to face the camera.", "type": "action", "weight": 2 } ] } ], "n_events": 1, "n_elements": 10 }, { "index": "TUNA_0410", "video_path": "VELOCITI/v_Z8vCm7TUK8c_seg_35_45.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on two men wearing khaki uniforms. The man on the left side of the frame is bending down to support a red handcart loaded with cardboard boxes and white furniture. The man in the middle holds a folder in his hand, with several pens tucked into his chest pocket. The man with the folder turns his head to speak to the man on the left, who then pushes the handcart and exits from the left side of the frame. In the background, there is a row of bookshelves fills with various books and some decorative items. Embedding within the shelves is an old-fashioned television set with a program playing.\nThe camera cuts to an elderly woman with white hair, dressing in a dark patterned outfit. She has a slight frown on her face as she speaks to someone opposite her. The background features a large glass window, through which green plants can be seen.\nThe camera cuts to the man with several pens tucks into his chest pocket. He raises his right index finger and points to his temple while speaking. 
Subsequently, he lowers his right hand.\nFinally, the scene cuts back to the elderly woman. She looks around to the left and right, then raises her right hand while speaking and walks towards the left side of the frame.", "events": [ { "event": "The camera focuses on two men wearing khaki uniforms. The man on the left side of the frame is bending down to support a red handcart loaded with cardboard boxes and white furniture. The man in the middle holds a folder in his hand, with several pens tucked into his chest pocket. The man with the folder turns his head to speak to the man on the left, who then pushes the handcart and exits from the left side of the frame. In the background, there is a row of bookshelves fills with various books and some decorative items. Embedding within the shelves is an old-fashioned television set with a program playing.", "visual_elements": [ { "content": "The camera focuses on two men wearing khaki uniforms.", "type": "camera", "weight": 3 }, { "content": "The man on the left side of the frame is bending down to support a red handcart.", "type": "attribute", "weight": 3 }, { "content": "There are some cardboard boxes and white furniture in the handcart.", "type": "attribute", "weight": 2 }, { "content": "The man in the middle holds a folder in his hand, with several pens tucked into his chest pocket.", "type": "attribute", "weight": 2 }, { "content": "The man with the folder turns his head to speak to the man on the left.", "type": "action", "weight": 3 }, { "content": "The man on the left pushes the handcart and exits from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "In the background, there is a row of bookshelves fills with various books and some decorative items.", "type": "scene", "weight": 2 }, { "content": "Embedding within the shelves is an old-fashioned television set with a program playing.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to an elderly woman with white hair, dressing in a dark patterned outfit. She has a slight frown on her face as she speaks to someone opposite her. The background features a large glass window, through which green plants can be seen.", "visual_elements": [ { "content": "The camera cuts to an elderly woman with white hair", "type": "camera", "weight": 3 }, { "content": "The elderly woman is dressing in a dark patterned outfit.", "type": "attribute", "weight": 2 }, { "content": "The elderly woman has a slight frown on her face as she speaks to someone opposite her.", "type": "action", "weight": 3 }, { "content": "The background features a large glass window, through which green plants can be seen.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to the man with several pens tucks into his chest pocket. He raises his right index finger and points to his temple while speaking. Subsequently, he lowers his right hand.", "visual_elements": [ { "content": "The camera cuts to the man with several pens tucks into his chest pocket.", "type": "camera", "weight": 3 }, { "content": "The man raises his right index finger and points to his temple while speaking.", "type": "action", "weight": 3 }, { "content": "Subsequently,the man lowers his right hand.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the scene cuts back to the elderly woman. 
She looks around to the left and right, then raises her right hand while speaking and walks towards the left side of the frame.", "visual_elements": [ { "content": "The scene cuts back to the elderly woman.", "type": "camera", "weight": 3 }, { "content": "The elderly woman looks around to the left and right.", "type": "action", "weight": 2 }, { "content": "Then the elderly woman raises her right hand while speaking and walks towards the left side of the frame.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0411", "video_path": "VELOCITI/v_ZIOCaOpBGpE_seg_10_20.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.0, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a woman wearing a thick coat that alternates between deep red and black, with a pink undershirt and a light pink silk underwear underneath. She stands up firstly and then removes her coat while speaking to someone across from her. The camera follows her movements upwards. After taking off her coat, she places it behind her. The background is a dimly lit room with a dresser in it, on which various cosmetics and a lamp are placed.\nThe camera cuts to a character with a duck's head, dressed in red checkered pajamas. The character speaks to someone across from him, with a piano and some furniture visible in the background.\nThe camera cuts back to the female character, who turns around and walks towards the bed. She kneels on the bed and throws an item from the bed onto the bedside table. The camera follows her movements to the lower right. The woman only wears panties on her lower half.\nThe camera cuts to the duck-headed character, who opens its mouth wide and walks forward. A mirror in the background reveals the woman still kneeling on the bed.", "events": [ { "event": "At the beginning of the video, the camera focuses on a woman wearing a thick coat that alternates between deep red and black, with a pink undershirt and a light pink silk underwear underneath. She stands up firstly and then removes her coat while speaking to someone across from her. The camera follows her movements upwards. After taking off her coat, she places it behind her. The background is a dimly lit room with a dresser in it, on which various cosmetics and a lamp are placed.", "visual_elements": [ { "content": "The camera focuses on a woman.", "type": "camera", "weight": 3 }, { "content": "The woman wearing a thick coat that alternates between deep red and black, with a pink undershirt and a light pink silk underwear underneath.", "type": "attribute", "weight": 2 }, { "content": "The woman stands up firstly and removes her coat while speaking to someone across from her.", "type": "action", "weight": 3 }, { "content": "The camera follows her movements upwards.", "type": "camera", "weight": 3 }, { "content": "The woman takes off her coat and places it behind her.", "type": "action", "weight": 2 }, { "content": "The background is a dimly lit room.", "type": "scene", "weight": 2 }, { "content": "There is a dresser in the room, on which various cosmetics and a lamp are placed.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a character with a duck's head, dressed in red checkered pajamas. 
The character speaks to someone across from him, with a piano and some furniture visible in the background.", "visual_elements": [ { "content": "The camera cuts to a character with a duck's head.", "type": "camera", "weight": 3 }, { "content": "The character is dressed in red checkered pajamas.", "type": "attribute", "weight": 2 }, { "content": "The character speaks to someone across from him.", "type": "action", "weight": 3 }, { "content": "A piano and some furniture are visible in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts back to the female character, who turns around and walks towards the bed. She kneels on the bed and throws an item from the bed onto the bedside table. The camera follows her movements to the lower right. The woman only wears panties on her lower half.", "visual_elements": [ { "content": "The camera cuts back to the female character.", "type": "camera", "weight": 3 }, { "content": "The woman turns around and walks towards the bed.", "type": "action", "weight": 3 }, { "content": "The woman kneels on the bed and throws an item from the bed onto the bedside table.", "type": "action", "weight": 3 }, { "content": "The camera follows her movements to the lower right.", "type": "camera", "weight": 3 }, { "content": "The woman only wears panties on her lower half.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera cuts to the duck-headed character, who opens its mouth wide and walks forward. A mirror in the background reveals the woman still kneeling on the bed.", "visual_elements": [ { "content": "The camera cuts to the duck-headed character.", "type": "camera", "weight": 3 }, { "content": "The character opens its mouth wide and walks forward.", "type": "action", "weight": 3 }, { "content": "A mirror in the background reveals the woman still kneeling on the bed.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0412", "video_path": "VELOCITI/v_ZKuscOD0LOM_seg_45_55.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a yellow robot is positioned on the left side of the screen, with its eyes emitting a blue glow. To the right of the robot stands a woman with brown hair dressed in dark clothing. On the woman's right is a black man with an afro hairstyle. The black man turns to his left, bends down, and picks up something. Both the woman standing nearby and the robot are watching his movements. The camera gently pans left and right following the man's actions. After standing up, the man first looks up at the woman and the robot, then lowers his head to open the white box he has just picked up.\nSubsequently, the camera cuts to a close-up of the robot inspecting the contents of the box. 
In front of the robot is a carton filled with white eggs. A hand enters from the right side of the screen and picks up an egg. The robot's gaze follows the movement of the hand.\nFinally, the camera cuts again to show the man taking an egg out of the carton and holding it up in front of the woman, while they engage in conversation. The man weighs the egg in his hand.", "events": [ { "event": "At the beginning of the video, a yellow robot is positioned on the left side of the screen, with its eyes emitting a blue glow. To the right of the robot stands a woman with brown hair dressed in dark clothing. On the woman's right is a black man with an afro hairstyle. The black man turns to his left, bends down, and picks up something. Both the woman standing nearby and the robot are watching his movements. The camera gently pans left and right following the man's actions. After standing up, the man first looks up at the woman and the robot, then lowers his head to open the white box he has just picked up.", "visual_elements": [ { "content": "A yellow robot is positioned on the left side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The robot's eyes emit a blue glow.", "type": "attribute", "weight": 2 }, { "content": "To the right of the robot stands a woman with brown hair dressed in dark clothing.", "type": "attribute", "weight": 3 }, { "content": "On the woman's right is a black man with an afro hairstyle.", "type": "attribute", "weight": 3 }, { "content": "The black man turns to his left, bends down, and picks up something.", "type": "action", "weight": 3 }, { "content": "Both the woman standing nearby and the robot are observing the man's actions.", "type": "attribute", "weight": 2 }, { "content": "The camera gently pans left and right following the man's actions.", "type": "camera", "weight": 3 }, { "content": "After standing up, the man first looks up at the woman and the robot.", "type": "action", "weight": 3 }, { "content": "The man lowers his head to open the white box he has just picked up.", "type": "action", "weight": 3 } ] }, { "event": "Subsequently, the camera cuts to a close-up of the robot inspecting the contents of the box. In front of the robot is a carton filled with white eggs. A hand enters from the right side of the screen and picks up an egg. 
The robot's gaze follows the movement of the hand.", "visual_elements": [ { "content": "The camera cuts to a close-up of the robot inspecting the contents of the box.", "type": "camera", "weight": 3 }, { "content": "In front of the robot is a carton filled with white eggs.", "type": "attribute", "weight": 3 }, { "content": "A hand enters from the right side of the screen and picks up an egg.", "type": "action", "weight": 3 }, { "content": "The robot's gaze follows the movement of the hand.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the camera cuts again to show the man taking an egg out of the carton and holding it up in front of the woman, while they engage in conversation. The man weighs the egg in his hand.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The man takes an egg out of the carton and holds it up in front of the woman.", "type": "action", "weight": 3 }, { "content": "The man and the woman are conversing.", "type": "action", "weight": 3 }, { "content": "The man weighs the egg in his hand.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0413", "video_path": "VELOCITI/v_ZcO9A2DK_8o_seg_20_30.mp4", "video_source": "VELOCITI", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the start of the video, the camera focuses on a middle-aged female with short hair. She stands at the doorway of a room that is undergoing renovation. Dressed in a floral pink blouse and a dark-colored coat, she looks ahead and takes two steps forward.\nThen, the camera cuts to a female with her hair tied in a low bun, crouching beside a chair with her back to the camera. She wears a black jacket and blue jeans. The female wearing black gloves manipulates a tool on the chair. Subsequently, she puts down the tool and turns her head to look behind her. Rising slowly to her feet, she directs her gaze towards her right rear. The camera follows the woman as it pans up. In the background, there are many rolls of pink adhesive tape unfurled, along with a room floor covered with renovation debris.\nThe camera cuts back to the short-haired woman who is standing at the doorway. She waves and greets someone across from her.", "events": [ { "event": "At the start of the video, the camera focuses on a middle-aged female with short hair. She stands at the doorway of a room that is undergoing renovation. Dressed in a floral pink blouse and a dark-colored coat, she looks ahead and takes two steps forward.", "visual_elements": [ { "content": "The camera focuses on a middle-aged female with short hair.", "type": "camera", "weight": 3 }, { "content": "The female stands at the doorway of a room that is undergoing renovation.", "type": "attribute", "weight": 2 }, { "content": "The female wears a floral pink blouse and a dark-colored coat.", "type": "attribute", "weight": 2 }, { "content": "The female looks ahead and takes two steps forward.", "type": "action", "weight": 2 } ] }, { "event": "Then, the camera cuts to a female with her hair tied in a low bun, crouching beside a chair with her back to the camera. She wears a black jacket and blue jeans. The female wearing black gloves manipulates a tool on the chair. Subsequently, she puts down the tool and turns her head to look behind her. Rising slowly to her feet, she directs her gaze towards her right rear. The camera follows the woman as it pans up. 
In the background, there are many rolls of pink adhesive tape unfurled, along with a room floor covered with renovation debris.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "A female with her hair styled in a low bun crouches beside a chair, facing away from the camera.", "type": "attribute", "weight": 3 }, { "content": "The female wears a black jacket and blue jeans.", "type": "attribute", "weight": 2 }, { "content": "The female wearing black gloves manipulates a tool on the chair.", "type": "action", "weight": 3 }, { "content": "The female puts down the tool and turns her head to look behind her.", "type": "action", "weight": 3 }, { "content": "Rising slowly to her feet, the female directs her gaze towards her right rear.", "type": "action", "weight": 3 }, { "content": "The camera follows the woman as it pans up.", "type": "camera", "weight": 3 }, { "content": "In the background, there are many rolls of pink adhesive tape unfurled, along with a room floor covered with renovation debris.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts back to the short-haired woman who is standing at the doorway. She waves and greets someone across from her.", "visual_elements": [ { "content": "The camera cuts back to the short-haired woman who is standing at the doorway.", "type": "camera", "weight": 3 }, { "content": "The short-haired female waves and greets someone on the opposite side.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0414", "video_path": "VELOCITI/v_ZruKu2N6nQw_seg_65_75.mp4", "video_source": "VELOCITI", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Film", "duration": 10.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on a middle-aged male wearing a dark green sweater with a white collar, holding a silver teapot in his hand. The male looks down and ahead, walking towards the left side of the screen while speaking. He bends down to put the teapot down. The camera follows the male panning to the left. In the background, louvre windows and dark brown curtains can be seen.\nThe scene cuts to a female's back. She has grayish-white hair tied up in a ponytail, and wears a red dress adorned with white dots. She sits in front of a fireplace, with her head slightly moving.\nThe scene cuts back to the male again. He straightens up and turns to his left. He speaks as he walks towards the right side of the screen. The camera follows the male panning to the right.", "events": [ { "event": "The camera focuses on a middle-aged male wearing a dark green sweater with a white collar, holding a silver teapot in his hand. The male looks down and ahead, walking towards the left side of the screen while speaking. He bends down to put the teapot down. The camera follows the male panning to the left. 
In the background, louvre windows and dark brown curtains can be seen.", "visual_elements": [ { "content": "The camera focuses on a middle-aged male wearing a dark green sweater with a white collar.", "type": "camera", "weight": 3 }, { "content": "The male holds a silver teapot in his hand.", "type": "attribute", "weight": 2 }, { "content": "The male looks down and ahead, walking towards the left side of the screen while speaking.", "type": "action", "weight": 3 }, { "content": "The male bends down to put the teapot down.", "type": "action", "weight": 3 }, { "content": "The camera follows the male panning to the left.", "type": "camera", "weight": 3 }, { "content": "In the background, louvre windows and dark brown curtains can be seen.", "type": "scene", "weight": 2 } ] }, { "event": "The scene cuts to a female's back. She has grayish-white hair tied up in a ponytail, and wears a red dress adorned with white dots. She sits in front of a fireplace, with her head slightly moving.", "visual_elements": [ { "content": "The scene cuts to a female's back.", "type": "camera", "weight": 3 }, { "content": "The female has grayish-white hair tied up in a ponytail.", "type": "attribute", "weight": 2 }, { "content": "The female wears a red dress adorned with white dots.", "type": "attribute", "weight": 2 }, { "content": "The female sits in front of a fireplace, with her head slightly moving.", "type": "attribute", "weight": 3 } ] }, { "event": "The scene cuts back to the male again. He straightens up and turns to his left. He speaks as he walks towards the right side of the screen. The camera follows the male panning to the right.", "visual_elements": [ { "content": "The scene cuts back to the male again.", "type": "camera", "weight": 3 }, { "content": "The male straightens up and turns to his left.", "type": "action", "weight": 3 }, { "content": "He speaks as he walks towards the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The camera follows the male panning to the right.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0415", "video_path": "PerceptionTest/video_1005.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 34.92, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a table with many household items on it. In the background are blue and white lockers.\nThe camera then captures a man wearing a white plaid shirt and a gold ring on his hand as he puts a mineral water bottle into a backpack. The man opens a brown briefcase and puts a book with a cover that reads “Tata and Art - 150 Years” into the briefcase. The man then puts the blue plastic box into the backpack, then the man puts the fork into the briefcase, and the man puts the blue notebook into the backpack.\nFinally, the man zips up the backpack and pats the briefcase before leaving the camera.", "events": [ { "event": "The video begins with the camera focusing on a table with many household items on it. 
In the background are blue and white lockers.", "visual_elements": [ { "content": "The camera focuses on a table.", "type": "camera", "weight": 3 }, { "content": "Many household items are on the tables.", "type": "scene", "weight": 2 }, { "content": "Blue and white lockers are in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then captures a man wearing a white plaid shirt and a gold ring on his hand as he puts a mineral water bottle into a backpack. The man opens a brown briefcase and puts a book with a cover that reads “Tata and Art - 150 Years” into the briefcase. The man then puts the blue plastic box into the backpack, then the man puts the fork into the briefcase, and the man puts the blue notebook into the backpack.", "visual_elements": [ { "content": "The camera catches a man wearing a white plaid shirt.", "type": "camera", "weight": 3 }, { "content": "The man has a gold ring on his hand.", "type": "attribute", "weight": 2 }, { "content": "The man puts a mineral water bottle into a backpack.", "type": "action", "weight": 2 }, { "content": "The man opens a brown briefcase.", "type": "action", "weight": 2 }, { "content": "The man puts a book with a cover that reads “Tata and Art - 150 Years” into the briefcase.", "type": "action", "weight": 2 }, { "content": "Then the man puts a blue plastic box into the backpack.", "type": "action", "weight": 2 }, { "content": "The man then puts a fork into the briefcase.", "type": "action", "weight": 2 }, { "content": "The man puts a blue notebook into the backpack.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the man zippers up the backpack and pats the briefcase before leaving the camera.", "visual_elements": [ { "content": "The man closes the zipper of the backpack and pats the briefcase.", "type": "action", "weight": 2 }, { "content": "Then the man leaves the camera.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0416", "video_path": "PerceptionTest/video_10121.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 17.32, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a wooden table with a clear glass cup with a metal spoon in it. Next to the glass is a white charging cord and a charging head. In the center of the table are three inverted white plastic cups, with the cups arranged side by side. In the background, a person in light-colored clothing can be seen with his hands on the table.\nNext, the figure picks up the cups in turn, with the center cup revealing a small orange hexagonal object, and then places the cups back in place. The figure's hands move two of the cups to disrupt the order, repeating this pattern six times.", "events": [ { "event": "The video begins with the camera focusing on a wooden table with a clear glass cup with a metal spoon in it. Next to the glass is a white charging cord and a charging head. In the center of the table are three inverted white plastic cups, with the cups arranged side by side. 
In the background, a person in light-colored clothing can be seen with his hands on the table.", "visual_elements": [ { "content": "The camera is focused on a wooden table.", "type": "camera", "weight": 3 }, { "content": "A clear glass is on the table.", "type": "scene", "weight": 2 }, { "content": "There is a metal spoon in the glass.", "type": "attribute", "weight": 2 }, { "content": "Next to the glass is a white charging cable and a charging head.", "type": "attribute", "weight": 2 }, { "content": "In the center of the table are three inverted white plastic cups.", "type": "scene", "weight": 2 }, { "content": "The cups are arranged side by side.", "type": "attribute", "weight": 2 }, { "content": "In the background, there is a person wearing light-colored clothes.", "type": "scene", "weight": 2 }, { "content": "The person's hands are on the table.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the figure picks up the cups in turn, with the center cup revealing a small orange hexagonal object, and then places the cups back in place. The figure's hands move two of the cups to disrupt the order, repeating this pattern six times.", "visual_elements": [ { "content": "The figure picks up the cups in turn.", "type": "action", "weight": 3 }, { "content": "The center cup reveals a small orange hexagonal object.", "type": "attribute", "weight": 2 }, { "content": "The cups are then placed back in their original position.", "type": "action", "weight": 2 }, { "content": "The figure moves two of the cups at a time with both hands, disrupting the order of the cups.", "type": "action", "weight": 2 }, { "content": "This pattern is repeated six times.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0417", "video_path": "PerceptionTest/video_10161.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 12.53, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a wooden table with two white and green cups, an orange doll on the right side of the cups, two pens on the right side of the doll, a red apple and white ping pong ball at the top of the table.\nA male wearing a gray long-sleeved shirt and dark pants stands behind the table, the male's hands are above the table, the male places his hands on each side of the two paper cups, the male then picks up the green paper cup with his left hand with nothing on the bottom and puts it back in place. The male picks up the white paper cup with his right hand, there is also nothing at the bottom of the cup, and then puts it back in place. 
The male then picks up two paper cups with each hand and quickly switches their positions.", "events": [ { "event": "The video begins with the camera focusing on a wooden table with two white and green cups, an orange doll on the right side of the cups, two pens on the right side of the dolls, a red apple and white ping pong ball at the top of the table.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "Two white and green cups are on the table.", "type": "scene", "weight": 2 }, { "content": "To the right of the cups is an orange doll.", "type": "attribute", "weight": 2 }, { "content": "To the right of the doll are two pens.", "type": "attribute", "weight": 2 }, { "content": "At the top of the table are a red apple and a white egg.", "type": "attribute", "weight": 2 } ] }, { "event": "A male wearing a gray long-sleeved shirt and dark pants stands behind the table, the male's hands are above the table, the male places his hands on each side of the two paper cups, the male then picks up the green paper cup with his left hand with nothing on the bottom and puts it back in place. The male picks up the white paper cup with his right hand, there is also nothing at the bottom of the cup, and then puts it back in place. The male then picks up two paper cups with each hand and quickly switches their positions.", "visual_elements": [ { "content": "A man wearing a gray long-sleeved shirt and dark pants stands behind the table.", "type": "scene", "weight": 2 }, { "content": "The man's hands are above the table.", "type": "attribute", "weight": 2 }, { "content": "The man places his hands on each side of two paper cups.", "type": "attribute", "weight": 2 }, { "content": "The man then picks up the green cup with his left hand.", "type": "action", "weight": 3 }, { "content": "There is nothing on the bottom.", "type": "scene", "weight": 2 }, { "content": "The man puts the cup back in its original position.", "type": "action", "weight": 2 }, { "content": "The man picks up the white cup with his right hand.", "type": "action", "weight": 3 }, { "content": "There is nothing on the bottom of the cup either.", "type": "scene", "weight": 2 }, { "content": "Then the man puts it back in its original position.", "type": "action", "weight": 2 }, { "content": "The man presses his hands on the bottom of two paper cups.", "type": "action", "weight": 3 }, { "content": "The man quickly switches their positions.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0418", "video_path": "PerceptionTest/video_1019.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 19.8, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a wooden cabinet with a few books, an iron and a hair dryer in front of it. The back wall is purple and the floor is wooden.\nThen, a hand appears on the right side of the frame, and the hand picks up a pink pen on the floor and taps a stack of books on the floor. The hand then moves the top book to the side, the hand moves the second book to the side as well, and the pile reveals a third book.\nNext, there is a black hair dryer with “SAFEWAY” written on it. The hand adjusts the switch of the hair dryer.", "events": [ { "event": "The video begins with the camera focusing on a wooden cabinet with a few books, an iron and a hair dryer in front of it. 
The back wall is purple and the floor is wooden.", "visual_elements": [ { "content": "The camera focuses on a wooden cabinet.", "type": "camera", "weight": 3 }, { "content": "In front of the cabinet are a few books, an iron and a hair dryer.", "type": "scene", "weight": 2 }, { "content": "The back wall is purple and the floor is wooden.", "type": "scene", "weight": 2 } ] }, { "event": "Then, a hand appears on the right side of the frame, and the hand picks up a pink pen on the floor and taps a stack of books on the floor. The hand then moves the top book to the side, the hand moves the second book to the side as well, and the pile reveals a third book.", "visual_elements": [ { "content": "A hand appears on the right side of the frame.", "type": "scene", "weight": 2 }, { "content": "The hand picks up a pink pen on the floor and taps a pile of books on the floor.", "type": "action", "weight": 3 }, { "content": "The hand moves the top book to the side.", "type": "action", "weight": 3 }, { "content": "The hand moves the second book to the other side.", "type": "attribute", "weight": 2 }, { "content": "A third book is revealed in the pile.", "type": "attribute", "weight": 1 } ] }, { "event": "Next, there is a black hair dryer with “SAFEWAY” written on it. The hand adjusts the switch of the hair dryer.", "visual_elements": [ { "content": "There is a black hair dryer in the picture.", "type": "scene", "weight": 2 }, { "content": "The word “SAFEWAY” is written on the hair dryer.", "type": "attribute", "weight": 1 }, { "content": "The hand adjusts the switch on the hair dryer.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0419", "video_path": "PerceptionTest/video_10297.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 34.92, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a kitchen countertop with some ingredients and utensils. Two round metal plates are positioned in the center, one containing vegetables and one empty. To the left of the plates is a white cup and black cylindrical jar, to the right of the plates is a large white bowl with four cubes of cut triangular bread, and to the right of the bowl is a kettle and bottle of oil.\nA male wearing a red plaid shirt stands behind the table, the male picks up two slices of bread from the bowl and places them on an empty metal plate, the male picks up two slices of tomato on the plate and places them on one of the bread slices, and then picks up two slices of cucumber and places them on top of the slices of tomato. The male then picks up some green lettuce leaves and places them on top of the cucumber slices, then the male places another slice of bread over the slice of bread that has had the vegetables placed on it, creating a sandwich then flattening it on the plate.\nFinally, the male picks up the black jar and twists its lid.", "events": [ { "event": "The video begins with the camera focusing on a kitchen countertop with some ingredients and utensils. Two round metal plates are positioned in the center, one containing vegetables and one empty. 
To the left of the plates is a white cup and black cylindrical jar, to the right of the plates is a large white bowl with four cubes of cut triangular bread, and to the right of the bowl is a kettle and bottle of oil.", "visual_elements": [ { "content": "The camera focuses on a kitchen countertop.", "type": "camera", "weight": 3 }, { "content": "There are some ingredients and utensils on the counter.", "type": "scene", "weight": 2 }, { "content": "Two round metal plates are placed in the center.", "type": "scene", "weight": 2 }, { "content": "One contains vegetables.", "type": "attribute", "weight": 1 }, { "content": "One is empty.", "type": "attribute", "weight": 1 }, { "content": "To the left of the plates are a white cup and a black cylindrical jar.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the plate is a large white bowl.", "type": "attribute", "weight": 2 }, { "content": "In the bowl are four triangles of cut bread.", "type": "attribute", "weight": 2 }, { "content": "To the right of the bowl is a kettle and oil bottle", "type": "attribute", "weight": 2 } ] }, { "event": "A male wearing a red plaid shirt stands behind the table, the male picks up two slices of bread from the bowl and places them on an empty metal plate, the male picks up two slices of tomato on the plate and places them on one of the bread slices, and then picks up two slices of cucumber and places them on top of the slices of tomato. The male then picks up some green lettuce leaves and places them on top of the cucumber slices, then the male places another slice of bread over the slice of bread that has had the vegetables placed on it, creating a sandwich then flattening it on the plate.", "visual_elements": [ { "content": "A male wearing a pink plaid shirt stands behind the table", "type": "scene", "weight": 2 }, { "content": "The man picks up two slices of bread from the bowl", "type": "action", "weight": 2 }, { "content": "The man places it on an empty metal plate", "type": "action", "weight": 2 }, { "content": "The man picks up two slices of tomato on a plate.", "type": "action", "weight": 2 }, { "content": "The man places tomato on a slice of bread.", "type": "action", "weight": 2 }, { "content": "The man picks up two more cucumber slices and places them on a cucumber slice.", "type": "action", "weight": 2 }, { "content": "The man picks up some green lettuce leaves and places them on a slice of bread.", "type": "action", "weight": 2 }, { "content": "The man puts another slice of bread on top of the slice of bread with the vegetables already on it.", "type": "action", "weight": 2 }, { "content": "The man creates a sandwich and lays it flat on a plate.", "type": "scene", "weight": 3 } ] }, { "event": "Finally, the male picks up the black jar and twists its lid.", "visual_elements": [ { "content": "The man picks up a black jar and twists the lid.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0420", "video_path": "PerceptionTest/video_1038.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 35.05, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a kitchen countertop with a number of items on it. In the center of the frame is a slice of bread and a cucumber, to the right of the bread is a fork, and to the right of the fork is a red fruit tea container. 
Below the bread is a white cup with fresh water and above the bread is a black electric kettle. To the left of the cucumber are two clear plastic containers, the container on the left contains white sugar, the container on the right contains brown sugar, and there is a metal spoon in the bottom right corner of the picture.\nThe hand of a person dressed in red appears on the right side of the image, the hand holds a tea bag from a tea box. The hand rips the tea bag open and puts it into a white cup, then picks up a black electric kettle and pours water into the white cup, the kettle is put back in place. The hand then takes a metal spoon and stirs it back and forth into the container with the white sugar.", "events": [ { "event": "The video begins with the camera focusing on a kitchen countertop with a number of items on it. In the center of the frame is a slice of bread and a cucumber, to the right of the bread is a fork, and to the right of the fork is a red fruit tea container. Below the bread is a white cup with fresh water and above the bread is a black electric kettle. To the left of the cucumber are two clear plastic containers, the container on the left contains white sugar, the container on the right contains brown sugar, and there is a metal spoon in the bottom right corner of the picture.", "visual_elements": [ { "content": "The camera focuses on a kitchen countertop.", "type": "camera", "weight": 3 }, { "content": "The kitchen countertop is covered with a number of items.", "type": "scene", "weight": 2 }, { "content": "In the center of the frame is a slice of bread and a cucumber.", "type": "scene", "weight": 2 }, { "content": "To the right of the bread is a fork.", "type": "attribute", "weight": 1 }, { "content": "To the right of the fork is a red fruit tea box.", "type": "attribute", "weight": 1 }, { "content": "Underneath the bread is a white cup containing water.", "type": "attribute", "weight": 2 }, { "content": "On top of the bread is a black electric kettle.", "type": "attribute", "weight": 2 }, { "content": "To the left of the cucumber are two clear plastic containers.", "type": "attribute", "weight": 2 }, { "content": "The top container contains white sugar.", "type": "attribute", "weight": 2 }, { "content": "The bottom container contains brown granules.", "type": "attribute", "weight": 2 }, { "content": "A metal spoon in the lower right corner of the picture.", "type": "scene", "weight": 2 } ] }, { "event": "The hand of a person dressed in red appears on the right side of the image, the hand holds a tea bag from a tea box. The hand rips the tea bag open and puts it into a white cup, then picks up a black electric kettle and pours water into the white cup, the kettle is put back in place. 
The hand then takes a metal spoon and stirs it back and forth into the container with the white sugar.", "visual_elements": [ { "content": "The hand of a man dressed in red appears on the right side of the picture.", "type": "scene", "weight": 2 }, { "content": "The hand is holding a tea bag from a tea box.", "type": "action", "weight": 2 }, { "content": "The hand rips open the tea bag and puts it into a white cup.", "type": "action", "weight": 2 }, { "content": "The man picks up a black electric kettle and pours water into the white cup.", "type": "action", "weight": 2 }, { "content": "The kettle is put back in its place.", "type": "action", "weight": 2 }, { "content": "The hand holds a metal fork and stirs it back and forth into a container of white sugar.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0421", "video_path": "PerceptionTest/video_10477.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 19.53, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a stainless steel pot placed on the floor next to a triangular framed table lamp. In the background is a white tiled wall and a wall with an electrical outlet with a power plug plugged into it and wires extending to the floor.\nA hand then enters the frame from the right, and the hand grabs the handle of the lid of the pot and takes it away to the ground, revealing a white loaf of bread inside the pot. The hand removes the bread from the pot and the whole of the bread appears in the frame, the bread is placed in a paper cup with a colorful pattern on it, the hand places the paper cup on the right side of the pot. Finally, the hand picks up the table lamp to adjust the base and puts it back.\nFinally, the hand picks up the lamp to adjust the base and puts it back.", "events": [ { "event": "The video begins with the camera focusing on a stainless steel pot placed on the floor next to a triangular framed table lamp. In the background is a white tiled wall and a wall with an electrical outlet with a power plug plugged into it and wires extending to the floor.", "visual_elements": [ { "content": "The camera focuses on a stainless steel pan sitting on the floor.", "type": "camera", "weight": 3 }, { "content": "Next to the steel pan is a table lamp with a triangular frame.", "type": "scene", "weight": 2 }, { "content": "In the background is a white tiled wall with electrical outlets.", "type": "scene", "weight": 2 }, { "content": "A power plug is inserted into the socket.", "type": "attribute", "weight": 1 }, { "content": "The cord extends to the floor.", "type": "attribute", "weight": 1 } ] }, { "event": "A hand then enters the frame from the right, and the hand grabs the handle of the lid of the pot and takes it away to the ground, revealing a white loaf of bread inside the pot. The hand removes the bread from the pot and the whole of the bread appears in the frame, the bread is placed in a paper cup with a colorful pattern on it, the hand places the paper cup on the right side of the pot. 
Finally, the hand picks up the table lamp to adjust the base and puts it back.", "visual_elements": [ { "content": "A hand enters the frame from the right.", "type": "scene", "weight": 2 }, { "content": "The hand grabs the lid of a pot by the handle and removes it to the ground.", "type": "action", "weight": 3 }, { "content": "A white loaf of bread is shown inside the pot.", "type": "attribute", "weight": 2 }, { "content": "The hand removes the bread from the pot.", "type": "action", "weight": 3 }, { "content": "The bread is shown in its entirety.", "type": "scene", "weight": 2 }, { "content": "The bread is placed in a paper cup.", "type": "attribute", "weight": 2 }, { "content": "The paper cup has a colorful pattern on it.", "type": "attribute", "weight": 1 }, { "content": "The camera is shaking.", "type": "camera", "weight": 3 }, { "content": "The hand places the cup on the right side of the pan.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the hand picks up the lamp to adjust the base and puts it back.", "visual_elements": [ { "content": "Finally the hand picks up the lamp and adjusts the base and puts it back down.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0422", "video_path": "PerceptionTest/video_10570.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 34.94, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a kitchen countertop, in the center of which there is a red and white cup and a glass bowl with sugar, behind the bowl is a black electric kettle, in front of the bowl are the three letters “TEA”, to the right of the bowl is a white milk jug with a flower pattern, and to the left of the cup is a red wrapped tea bag.\nA man in yellow short sleeves stood behind the counter, he took a tea bag from the tea bag with his right hand and put it into the cup, he took the kettle with his left hand and poured water into the cup, then he took the spoon in the glass bowl with his right hand and scooped two spoonfuls of sugar into the cup, and at last, he rearranged the decorations on the countertop with his hands to put the letters of TEA neatly. 
", "events": [ { "event": "At the beginning of the video, the camera focuses on a kitchen countertop, in the center of which there is a red and white cup and a glass bowl with sugar, behind the bowl is a black electric kettle, in front of the bowl are the three letters “TEA”, to the right of the bowl is a white milk jug with a flower pattern, and to the left of the cup is a red wrapped tea bag.", "visual_elements": [ { "content": "The camera focuses on a kitchen countertop.", "type": "camera", "weight": 3 }, { "content": "In the center of the counter is a red and white cup and a glass bowl with sugar.", "type": "scene", "weight": 3 }, { "content": "Behind the bowl is a black electric kettle.", "type": "attribute", "weight": 2 }, { "content": "In front of the bowl are the letters “TEA”.", "type": "attribute", "weight": 2 }, { "content": "To the right of the bowl is a white kettle with a flower pattern.", "type": "attribute", "weight": 2 }, { "content": "To the left of the cup is a red tea bag.", "type": "attribute", "weight": 2 } ] }, { "event": "A man in yellow short sleeves stood behind the counter, he took a tea bag from the tea bag with his right hand and put it into the cup, he took the kettle with his left hand and poured water into the cup, then he took the spoon in the glass bowl with his right hand and scooped two spoonfuls of sugar into the cup, and at last, he rearranged the decorations on the countertop with his hands to put the letters of TEA neatly.", "visual_elements": [ { "content": "A man in yellow short sleeves stands behind the counter.", "type": "scene", "weight": 2 }, { "content": "With his right hand, the man takes a tea bag out of its wrapper and puts it into the cup.", "type": "action", "weight": 2 }, { "content": "With his left hand, the man picks up an electric kettle and pours water into the cup.", "type": "action", "weight": 2 }, { "content": "With his right hand, the man takes the spoon from the glass bowl and scoops two spoonfuls of sugar into the cup.", "type": "action", "weight": 2 }, { "content": "Finally, this person uses both hands to rearrange the “TEA” decorations on the countertop.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0423", "video_path": "PerceptionTest/video_10609.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 35.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on the kitchen counter, in front of the camera is a white coffee cup, to the left of the cup is a clear glass bottle with water, to the right of the cup is a 
black kettle, behind the cup there is a small clear glass, to the right of the glass cup there is a small orange drinking glass jar with spices, to the left of the glass jar is a gas stove.", "visual_elements": [ { "content": "The camera focuses on the kitchen countertop.", "type": "camera", "weight": 3 }, { "content": "In front of the camera is a white coffee cup.", "type": "scene", "weight": 3 }, { "content": "On the left side of the cup is a clear glass carafe containing water.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the mug is a black kettle.", "type": "attribute", "weight": 2 }, { "content": "Behind the cup is a small clear glass.", "type": "attribute", "weight": 2 }, { "content": "To the right of the glass is a small orange and a glass jar with spices.", "type": "attribute", "weight": 2 }, { "content": "To the left of the jar is a gas stove.", "type": "attribute", "weight": 2 } ] }, { "event": "A man appears from the left side of the front frame, the man holds a glass jar and pours water into the glass, his hand puts the glass jar onto the gas stove, his hand holds a tea bag into a white coffee cup mug, and then his hand picks up the kettle and pours it into the coffee cup.", "visual_elements": [ { "content": "A man appears from the left side of the front screen", "type": "scene", "weight": 2 }, { "content": "The man is holding a glass bottle and pouring water into the glass.", "type": "action", "weight": 2 }, { "content": "The hand puts the bottle on the gas stove.", "type": "action", "weight": 2 }, { "content": "The hand holds a tea bag and puts it into a white coffee cup.", "type": "action", "weight": 2 }, { "content": "The hand then picks up a kettle and pours it into a coffee cup.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0424", "video_path": "PerceptionTest/video_10653.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 18.75, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a pair of feet wearing red sneakers with laces hanging loosely on the floor. In the background is a white tile floor with a pink plastic cup with a red pen stuck in it on the left side and a patterned plate with a black pair of scissors on it on the right side.\nThen a pair of hands appear in the picture and begin to straighten the laces on the left foot, tying the laces into a bow, and then straightening the laces on the right to tie them.", "events": [ { "event": "The video begins with the camera focusing on a pair of feet wearing red sneakers with laces hanging loosely on the floor. 
In the background is a white tile floor with a pink plastic cup with a red pen stuck in it on the left side and a patterned plate with a black pair of scissors on it on the right side.", "visual_elements": [ { "content": "The camera focuses on a pair of feet wearing red sneakers.", "type": "camera", "weight": 3 }, { "content": "The laces of the shoes are hanging loosely on the floor.", "type": "scene", "weight": 3 }, { "content": "In the background is a white tile floor.", "type": "scene", "weight": 2 }, { "content": "On the left is a pink plastic cup.", "type": "attribute", "weight": 2 }, { "content": "Inside the plastic cup is a red pen", "type": "attribute", "weight": 2 }, { "content": "On the right is a patterned plate.", "type": "attribute", "weight": 2 }, { "content": "On the plate is a pair of black scissors", "type": "attribute", "weight": 2 } ] }, { "event": "Then a pair of hands appear in the picture and begin to straighten the laces on the left foot, tying the laces into a bow, and then straightening the laces on the right to tie them.", "visual_elements": [ { "content": "A pair of hands appears in the picture.", "type": "attribute", "weight": 2 }, { "content": "A hand starts to tie the shoelace of the right foot.", "type": "action", "weight": 2 }, { "content": "The person ties the laces in a bow.", "type": "action", "weight": 2 }, { "content": "The man then ties the left shoe.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0425", "video_path": "PerceptionTest/video_10682.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 26.61, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a table with two pieces of clothing folded together, the bottom shirt is gray with a bear design, the top shirt is pink, and on the right side of the shirt there is a blue pen stuck in a blue cup, and next to the cup there is a blue bottle cap and a clear jar. In the background is a kitchen with a person in a gray striped top standing behind a table.\nThe person picks up a pink shirt and folds it and puts it on the table, then picks up the gray shirt and folds it and puts it on top of the pink shirt, the person picks up a piece of white paper on the table that has some colorful drawings on it. The person folds the paper in half and places it next to the clothing.", "events": [ { "event": "The video begins with the camera focusing on a table with two pieces of clothing folded together, the bottom shirt is gray with a bear design, the top shirt is pink, and on the right side of the shirt there is a blue pen stuck in a blue cup, and next to the cup there is a blue bottle cap and a clear jar. 
In the background is a kitchen with a person in a gray striped top standing behind a table.", "visual_elements": [ { "content": "The camera focuses on a table.", "type": "camera", "weight": 3 }, { "content": "There are two pieces of clothing folded together on the table.", "type": "scene", "weight": 1 }, { "content": "The bottom dress is gray with a bear on it.", "type": "attribute", "weight": 2 }, { "content": "The top dress is pink.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the dress is a blue pen in a blue cup.", "type": "attribute", "weight": 2 }, { "content": "Next to the cup is a blue bottle cap and a clear jar.", "type": "attribute", "weight": 2 }, { "content": "The background is a kitchen.", "type": "scene", "weight": 2 }, { "content": "A man in a gray striped top is standing behind a table.", "type": "scene", "weight": 2 } ] }, { "event": "The person picks up a pink shirt and folds it and puts it on the table, then picks up the gray shirt and folds it and puts it on top of the pink shirt, the person picks up a piece of white paper on the table that has some colorful drawings on it. The person folds the paper in half and places it next to the clothing.", "visual_elements": [ { "content": "The man picks up a pink shirt and folds it on the table.", "type": "action", "weight": 2 }, { "content": "The man takes another gray shirt and folds it on top of the pink shirt.", "type": "action", "weight": 3 }, { "content": "The man picks up a piece of white paper on the table.", "type": "action", "weight": 2 }, { "content": "There are some colorful drawings on the paper.", "type": "attribute", "weight": 1 }, { "content": "The person folds the paper and puts it next to the clothes.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0426", "video_path": "PerceptionTest/video_1071.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 10.43, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a woman wearing a pink long-sleeved blouse, and the person's hands are resting on the table. In the center of the frame is a book with a cartoon racing car on the cover, a piece of paper on the left side of the book, and a purple book bag on the right side of the book. In front of the table, from left to right, are bananas, round badges, cars, and duck toys.\nThe man picks up the book on the table and the woman flips the book over to clip the piece of paper next to the book into the book and finally puts the book into the purple book bag.", "events": [ { "event": "The video begins with the camera focusing on a woman wearing a pink long-sleeved blouse, and the person's hands are resting on the table. In the center of the frame is a book with a cartoon racing car on the cover, a piece of paper on the left side of the book, and a purple book bag on the right side of the book. 
In front of the table, from left to right, are bananas, round badges, cars, and duck toys.", "visual_elements": [ { "content": "The camera focuses on a man wearing a purple long-sleeved shirt.", "type": "camera", "weight": 3 }, { "content": "The person's hands are on the table.", "type": "scene", "weight": 1 }, { "content": "In the center of the frame is a book with a cartoon race car on the cover.", "type": "scene", "weight": 2 }, { "content": "On the left side of the book is a piece of paper.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the book is a purple school bag.", "type": "attribute", "weight": 2 }, { "content": "In front of the table, from left to right, are a banana toy, a circle badge, a car, and a duck toy.", "type": "attribute", "weight": 2 } ] }, { "event": "The man picks up the book on the table and the woman flips the book over to clip the piece of paper next to the book into the book and finally puts the book into the purple book bag.", "visual_elements": [ { "content": "The person picks up the book on the table.", "type": "action", "weight": 2 }, { "content": "The person opens the book and slips the piece of paper next to the book into the book.", "type": "action", "weight": 2 }, { "content": "This person finally puts the book into a purple bag.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0427", "video_path": "PerceptionTest/video_10728.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 21.66, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a dark-colored table with a green tissue box, a pen to the left of the tissue box, and two books in front of the tissue box. In the background is a white wall with a green cleaning brush hanging on it, and on the other side of the wall is an electrical outlet and a switch. On the right side of the wall is an orange and yellow poster with some text on it.\nA male wearing a green t-shirt enters the frame, the male reaches down and picks up a book on the table and stacks it on top of another book, then places a pen and carton on top of the book. The male picks up a small white object on the table that has a cord attached to a socket and puts it back in place.", "events": [ { "event": "The video begins with the camera focusing on a dark-colored table with a green tissue box, a pen to the left of the tissue box, and two books in front of the tissue box. In the background is a white wall with a green cleaning brush hanging on it, and on the other side of the wall is an electrical outlet and a switch. 
On the right side of the wall is an orange and yellow poster with some text on it.", "visual_elements": [ { "content": "The camera focuses on a dark-colored table.", "type": "camera", "weight": 3 }, { "content": "On the table is a green tissue box.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the tissue box is a pen.", "type": "attribute", "weight": 3 }, { "content": "In front of the tissue box are two books.", "type": "scene", "weight": 1 }, { "content": "In the background is a white wall.", "type": "attribute", "weight": 2 }, { "content": "A green cleaning brush hangs on the wall.", "type": "attribute", "weight": 2 }, { "content": "On the other side of the wall is an electrical outlet and a switch.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the wall is an orange and yellow poster.", "type": "attribute", "weight": 2 }, { "content": "The poster has some text on it.", "type": "attribute", "weight": 2 } ] }, { "event": "A male wearing a green t-shirt enters the frame, the male reaches down and picks up a book on the table and stacks it on top of another book, then places a pen and carton on top of the book. The male picks up a small white object on the table that has a cord attached to a socket and puts it back in place.", "visual_elements": [ { "content": "A male wearing a green t-shirt enters the picture", "type": "scene", "weight": 2 }, { "content": "The male reaches down and picks up a book on the table and stacks it on top of another book.", "type": "action", "weight": 2 }, { "content": "The male places a pen and paper case on top of the book.", "type": "action", "weight": 2 }, { "content": "The male picks up a cord on the table and connects it to a small white object in a socket.", "type": "action", "weight": 2 }, { "content": "The male then puts the white object back in its place.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0428", "video_path": "PerceptionTest/video_10825.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 31.91, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a small table with a printed tablecloth, a green vegetable on the far left of the table, an egg in the center, and a metal spoon on the far right. In the background is a red carpet with a white tile floor underneath.\nA male wearing a black short-sleeved top and black shorts enters from the right side of the frame, the male sits on his knees in front of a table, the male pulls out a pink plastic cup from underneath the table and places it on the table. The male takes out another fluorescent colored plastic cup and places it next to the pink cup. The male then picks up a glass with the Starbucks logo and places it upside down behind the fluorescent colored cup. Next, the male picks up a white frosted plastic cup with a red rim and places it next to the Starbucks cup, and finally pulls out a clear plastic cup and places it next to the spoon as the male in the scene stands up and leaves the scene.", "events": [ { "event": "The video begins with the camera focusing on a small table with a printed tablecloth, a green vegetable on the far left of the table, an egg in the center, and a metal spoon on the far right. 
In the background is a red carpet with a white tile floor underneath.", "visual_elements": [ { "content": "The camera focuses on a small table covered with a printed tablecloth.", "type": "camera", "weight": 3 }, { "content": "On the far left of the table is a green vegetable.", "type": "scene", "weight": 1 }, { "content": "In the center is an egg.", "type": "scene", "weight": 2 }, { "content": "On the far right is a metal spoon.", "type": "scene", "weight": 2 }, { "content": "In the background is a red carpet.", "type": "scene", "weight": 2 }, { "content": "Underneath the carpet is a white tile floor.", "type": "attribute", "weight": 2 } ] }, { "event": "A male wearing a black short-sleeved top and black shorts enters from the right side of the frame, the male sits on his knees in front of a table, the male pulls out a pink plastic cup from underneath the table and places it on the table. The male takes out another fluorescent colored plastic cup and places it next to the pink cup. The male then picks up a glass with the Starbucks logo and places it upside down behind the fluorescent colored cup. Next, the male picks up a white frosted plastic cup with a red rim and places it next to the Starbucks cup, and finally pulls out a clear plastic cup and places it next to the spoon as the male in the scene stands up and leaves the scene.", "visual_elements": [ { "content": "A man wearing a black short-sleeved shirt and black shorts enters from the right side of the frame.", "type": "scene", "weight": 2 }, { "content": "The man sits on his knees in front of a table.", "type": "action", "weight": 2 }, { "content": "The male takes out a pink plastic cup from underneath the table and places it on the table.", "type": "action", "weight": 2 }, { "content": "The male takes out another fluorescent colored plastic cup and repeats the motion briefly.", "type": "action", "weight": 2 }, { "content": "The male then picks up a glass with the Starbucks logo on it.", "type": "action", "weight": 3 }, { "content": "The male places the glass upside down behind the fluorescent colored glass.", "type": "action", "weight": 1 }, { "content": "The male picks up a white frosted plastic cup.", "type": "action", "weight": 2 }, { "content": "The cup has a red rim.", "type": "attribute", "weight": 2 }, { "content": "The male places the plastic cup next to the Starbucks cup.", "type": "attribute", "weight": 2 }, { "content": "Finally the male takes out a clear plastic cup and places it next to a spoon.", "type": "action", "weight": 2 }, { "content": "The male stands up and leaves the frame.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0429", "video_path": "PerceptionTest/video_10874.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 24.48, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a table covered with a floral patterned tablecloth. In the center of the table is a book, behind the book is a red decorative jar, to the right of the book is a silver handbag, to the right of the book are two bills, behind the bills is a small white model of a television set, and to the right of the bills is a green stuffed animal. 
A woman wearing a gray stuffed jacket appears in the frame, and the woman stands behind the table.\nThe camera then continues to move to the right as the female picks up a book on the table and turns the pages, the female picks up the bill on the table and puts it into the pages of the book, and then the female picks up a silver handbag and puts the book into the bag.", "events": [ { "event": "The video begins with the camera focusing on a table covered with a floral patterned tablecloth. In the center of the table is a book, behind the book is a red decorative jar, to the right of the book is a silver handbag, to the right of the book are two bills, behind the bills is a small white model of a television set, and to the right of the bills is a green stuffed animal. A woman wearing a gray stuffed jacket appears in the frame, and the woman stands behind the table.", "visual_elements": [ { "content": "The camera focuses on a table with a floral tablecloth.", "type": "camera", "weight": 3 }, { "content": "A book is placed in the center of the table.", "type": "scene", "weight": 2 }, { "content": "Below the book is a red decorative jar.", "type": "attribute", "weight": 2 }, { "content": "To the left of the book is a silver handbag.", "type": "attribute", "weight": 2 }, { "content": "To the left of the book are two banknotes.", "type": "attribute", "weight": 2 }, { "content": "Underneath the banknotes is a small white model of a television set.", "type": "attribute", "weight": 2 }, { "content": "To the right of the banknotes is a green stuffed animal.", "type": "attribute", "weight": 2 }, { "content": "A woman wearing a gray stuffed jacket appears in the frame.", "type": "scene", "weight": 2 }, { "content": "The woman is standing behind a table.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera then continues to move to the right as the female picks up a book on the table and turns the pages, the female picks up the bill on the table and puts it into the pages of the book, and then the female picks up a silver handbag and puts the book into the bag.", "visual_elements": [ { "content": "The camera continues to move to the right.", "type": "camera", "weight": 3 }, { "content": "The woman picks up a book on the table and turns the pages.", "type": "action", "weight": 2 }, { "content": "Woman picks up a bill from the table and puts it into the pages of the book.", "type": "action", "weight": 2 }, { "content": "The woman puts the book into her bag.", "type": "action", "weight": 2 }, { "content": "Woman then picks up silver handbag and puts book in bag", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0430", "video_path": "PerceptionTest/video_10962.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 33.31, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a white tabletop with two letters “PT” on the far left side of the table, and ingredients and items on the right side of the table.\nA male wearing a black short-sleeved shirt enters the frame and walks to the front of the table to take the lettuce on the right side of the bowl and tears it into small pieces and puts it into an empty white bowl, then the male picks up the slices of cucumber and tomato on the small plate in front of the bowl and puts them into the bowl one by one. 
The male picks up the condiment bottle at the front of the table and pours some of the liquid product into the bowl, and finally the male picks up a spoon and begins to stir the ingredients in the bowl.\nThe male switches the cardboard letters “P” and “T” on the table and leaves the scene.", "events": [ { "event": "At the beginning of the video, the camera focuses on a white tabletop with two letters “PT” on the far left side of the table, and ingredients and items on the right side of the table.", "visual_elements": [ { "content": "The camera focuses on a white tabletop.", "type": "camera", "weight": 3 }, { "content": "On the far left side of the table are the letters “PT”.", "type": "scene", "weight": 2 }, { "content": "On the right side of the table are some ingredients and items.", "type": "attribute", "weight": 2 } ] }, { "event": "A male wearing a black short-sleeved shirt enters the frame and walks to the front of the table to take the lettuce on the right side of the bowl and tears it into small pieces and puts it into an empty white bowl, then the male picks up the slices of cucumber and tomato on the small plate in front of the bowl and puts them into the bowl one by one. The male picks up the condiment bottle at the front of the table and pours some of the liquid product into the bowl, and finally the male picks up a spoon and begins to stir the ingredients in the bowl.", "visual_elements": [ { "content": "A man wearing a black short-sleeved shirt enters the frame.", "type": "scene", "weight": 3 }, { "content": "The male walks to the table and takes the lettuce on the right side of the bowl.", "type": "action", "weight": 2 }, { "content": "The male tears the lettuce into small pieces and puts them into an empty white bowl.", "type": "action", "weight": 2 }, { "content": "The male then picks up the cucumber and tomato slices from the small plate in front of the bowl.", "type": "action", "weight": 2 }, { "content": "The male puts the ingredients into the bowl one by one.", "type": "action", "weight": 2 }, { "content": "The male picks up the condiment bottle at the front of the table.", "type": "action", "weight": 2 }, { "content": "The male pours some liquid into the bowl.", "type": "action", "weight": 2 }, { "content": "Finally, the male picks up a spoon and starts to stir the ingredients in the bowl.", "type": "action", "weight": 2 } ] }, { "event": "The male switches the cardboard letters “P” and “T” on the table and leaves the scene.", "visual_elements": [ { "content": "The male switches the cardboard letters “P” and “T” on the countertop.", "type": "action", "weight": 3 }, { "content": "Finally the male leaves the scene.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0431", "video_path": "PerceptionTest/video_11156.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 33.34, "resolution": { "width": 1280, "height": 960 }, "caption": "At the beginning of the video, the camera focuses on the upper torso of a woman who wears a black leather jacket over a black top, with a Chanel-logo necklace around her neck. She sits at a wooden table with blank paper and a set of keys on it. Some office supplies and documents can be seen in the background.\nThe camera remains fixed as the woman's right hand stretches towards the left side of the scene to grab something. Then, her right hand picks up a pen from the right side of the blank paper. 
The woman's left hand enters the scene from the right side, with a watch on her wrist and several rings on her fingers. She draws a small circle, a large circle, and a triangle on the paper, starting from her right side. She holds the paper up vertically to show it to the camera, then puts it back on the table.\nThe woman brings in a red bottle cap from the right side of the scene and places it on top of the small circle on the paper. Then, she brings in a purple bottle cap and puts it on top of the large circle. Finally, she brings in a green triangular plastic piece and places it on top of the triangle on the paper. At the end of the video, the woman stretches her right hand towards the back of the camera from the left side of the scene.", "events": [ { "event": "At the beginning of the video, the camera focuses on the upper torso of a woman who wears a black leather jacket over a black top, with a Chanel-logo necklace around her neck. She sits at a wooden table with blank paper and a set of keys on it. Some office supplies and documents can be seen in the background.", "visual_elements": [ { "content": "The camera focuses on the upper torso of a woman.", "type": "camera", "weight": 3 }, { "content": "The woman wears a black leather jacket over a black top.", "type": "attribute", "weight": 3 }, { "content": "A necklace with the Chanel logo adorns her neck.", "type": "attribute", "weight": 2 }, { "content": "The woman sits at a wooden table.", "type": "attribute", "weight": 3 }, { "content": "There are blank sheets of paper and a set of keys on the table.", "type": "attribute", "weight": 3 }, { "content": "In the background, some office supplies and documents can be seen.", "type": "scene", "weight": 1 } ] }, { "event": "The camera remains fixed as the woman's right hand stretches towards the left side of the scene to grab something. Then, her right hand picks up a pen from the right side of the blank paper. The woman's left hand enters the scene from the right side, with a watch on her wrist and several rings on her fingers. She draws a small circle, a large circle, and a triangle on the paper, starting from her right side. She holds the paper up vertically to show it to the camera, then puts it back on the table.", "visual_elements": [ { "content": "The woman's right hand stretches towards the left side of the scene, as if to reach for something.", "type": "action", "weight": 3 }, { "content": "Then, the woman's right hand picks up a pen from the right side of the blank paper.", "type": "action", "weight": 3 }, { "content": "The woman's left hand enters the scene from the right side.", "type": "action", "weight": 3 }, { "content": "A watch is on the woman's left wrist, and several rings are on her left hand.", "type": "attribute", "weight": 2 }, { "content": "The woman draws a small circle, a large circle, and a triangle on the paper, starting from her right side.", "type": "action", "weight": 3 }, { "content": "The woman holds the drawn paper up vertically to show it to the camera.", "type": "action", "weight": 3 }, { "content": "The woman puts the paper back on the table.", "type": "action", "weight": 2 } ] }, { "event": "The woman brings in a red bottle cap from the right side of the scene and places it on top of the small circle on the paper. Then, she brings in a purple bottle cap and puts it on top of the large circle. Finally, she brings in a green triangular plastic piece and places it on top of the triangle on the paper. 
At the end of the video, the woman stretches her right hand towards the back of the camera from the left side of the scene.", "visual_elements": [ { "content": "The woman brings in a red bottle cap from the right side and places it on top of the small circle on the paper.", "type": "action", "weight": 3 }, { "content": "Then, the woman brings in a purple bottle cap and puts it on top of the large circle.", "type": "action", "weight": 3 }, { "content": "Finally, the woman brings in a green triangular plastic piece and places it on top of the triangle on the paper.", "type": "action", "weight": 3 }, { "content": "At the end of the video, the woman stretches her right hand towards the back of the camera from the left side of the scene.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0432", "video_path": "PerceptionTest/video_11157.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 23.89, "resolution": { "width": 1920, "height": 896 }, "caption": "At the start of the video, a table covered with a white checkered tablecloth appears on the screen, with a blue plate on it. A person dressed in pink stands on the right side of the table, with her hands placed flat on it.\nNext, the person turns to the side, and her left hand momentarily disappears from the right edge of the scene. When it reappears, it holds a bottle of soy sauce with a red cap. The soy sauce bottle is then transferred from her left hand to her right, while her left hand once again vanishes from sight. The person first holds the soy sauce bottle in the palm of her right hand and shows it to the camera before placing it near the blue plate.\nSubsequently, her right hand reaches towards the right side of the screen to receive a transparent liquid bottle with a yellow cap. She places this transparent liquid bottle beside the soy sauce bottle. Then, her right hand receives a carrot from the right side of the screen. The person first displays the carrot in the palm of her hand before placing it next to the transparent liquid bottle. At this moment, the soy sauce bottle, the transparent liquid bottle, and the carrot are arranged sequentially from left to right on the table, with the blue plate positioned behind them. The backdrop remains a white wall throughout.", "events": [ { "event": "At the start of the video, a table covered with a white checkered tablecloth appears on the screen, with a blue plate on it. A person dressed in pink stands on the right side of the table, with her hands placed flat on it.", "visual_elements": [ { "content": "A table covered with a white checkered tablecloth appears.", "type": "attribute", "weight": 3 }, { "content": "A blue plate is on the table.", "type": "attribute", "weight": 3 }, { "content": "A person dressed in pink stands on the right side of the table.", "type": "attribute", "weight": 3 }, { "content": "The person's hands rest on the table.", "type": "attribute", "weight": 3 } ] }, { "event": "Next, the person turns to the side, and her left hand momentarily disappears from the right edge of the scene. When it reappears, it holds a bottle of soy sauce with a red cap. The soy sauce bottle is then transferred from her left hand to her right, while her left hand once again vanishes from sight. 
The person first holds the soy sauce bottle in the palm of her right hand and shows it to the camera before placing it near the blue plate.", "visual_elements": [ { "content": "The person in the scene turns to the side, and the left hand disappears from the right side.", "type": "action", "weight": 3 }, { "content": "When the left hand reappears, it  holds a bottle of soy sauce with a red cap.", "type": "action", "weight": 3 }, { "content": "The soy sauce bottle is transferred from the left hand to the right hand.", "type": "action", "weight": 3 }, { "content": "The left hand disappears again.", "type": "action", "weight": 2 }, { "content": "The person first holds the soy sauce bottle in her right hand and shows it to the camera.", "type": "action", "weight": 3 }, { "content": "The person then places the soy sauce bottle near the blue plate.", "type": "action", "weight": 3 } ] }, { "event": "Subsequently, her right hand reaches towards the right side of the screen to receive a transparent liquid bottle with a yellow cap. She places this transparent liquid bottle beside the soy sauce bottle. Then, her right hand receives a carrot from the right side of the screen. The person first displays the carrot in the palm of her hand before placing it next to the transparent liquid bottle. At this moment, the soy sauce bottle, the transparent liquid bottle, and the carrot are arranged sequentially from left to right on the table, with the blue plate positioned behind them. The backdrop remains a white wall throughout.", "visual_elements": [ { "content": "The person's right hand reaches towards the right side of the screen to receive a transparent liquid bottle with a yellow cap.", "type": "action", "weight": 3 }, { "content": "The person places this transparent liquid bottle beside the soy sauce bottle.", "type": "action", "weight": 3 }, { "content": "The person's right hand receives a carrot from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The person first displays the carrot in the palm of her hand.", "type": "action", "weight": 3 }, { "content": "The person then places the carrot next to the transparent liquid bottle.", "type": "action", "weight": 3 }, { "content": "The soy sauce bottle, the transparent liquid bottle, and the carrot are arranged sequentially from left to right on the table.", "type": "attribute", "weight": 3 }, { "content": "The blue plate is positioned behind the condiments and the carrot.", "type": "attribute", "weight": 3 }, { "content": "The background remains a white wall throughout.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0433", "video_path": "PerceptionTest/video_11170.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 20.62, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the start of the video, the camera captures a top-down view of a wooden table. On the left side of the table, there is a glass jar. Inside the jar, there is an orange object with a silver fork pressed onto it. To the right of the jar, there is a pile of clothes. The background features a white wall, with white brick decorations on the left side of the screen. A white lace curtain hangs over the window, revealing a white radiator behind it.\nA man dressed in a gray and blue striped T-shirt and black pants enters the scene from the right. He walks over to the table and starts organizing the clothes on it. 
He first picks up a black piece of clothing, unfolds it, inspects it, and then folds it twice before placing it to the right of the original pile. Afterwards, the man picks up a blue sock, carefully folds it twice, and places it atop the black clothing.\nSubsequently, the man exits the scene from the right. The clothes on the table are now divided into two piles: on the left, a pile of unfolded green clothes, and on the right, a folded stack of black clothing and blue sockS.", "events": [ { "event": "At the start of the video, the camera captures a top-down view of a wooden table. On the left side of the table, there is a glass jar. Inside the jar, there is an orange object with a silver fork pressed onto it. To the right of the jar, there is a pile of clothes. The background features a white wall, with white brick decorations on the left side of the screen. A white lace curtain hangs over the window, revealing a white radiator behind it.", "visual_elements": [ { "content": "The camera captures a top-down view of a wooden table.", "type": "camera", "weight": 3 }, { "content": "On the left side of the table, there is a glass jar.", "type": "attribute", "weight": 3 }, { "content": "Inside the jar, there is an orange object.", "type": "attribute", "weight": 2 }, { "content": "A silver fork is pressed onto the orange object.", "type": "attribute", "weight": 1 }, { "content": "To the right of the jar, there is a pile of clothes.", "type": "attribute", "weight": 3 }, { "content": "The background features a white wall.", "type": "scene", "weight": 3 }, { "content": "White brick decorations are on the left side of the screen.", "type": "scene", "weight": 1 }, { "content": "A white lace curtain hangs over the window.", "type": "scene", "weight": 2 }, { "content": "A white radiator is revealed behind the curtain.", "type": "scene", "weight": 2 } ] }, { "event": "A man dressed in a gray and blue striped T-shirt and black pants enters the scene from the right. He walks over to the table and starts organizing the clothes on it. He first picks up a black piece of clothing, unfolds it, inspects it, and then folds it twice before placing it to the right of the original pile. Afterwards, the man picks up a blue sock, carefully folds it twice, and places it atop the black clothing.", "visual_elements": [ { "content": "A man dressed in a gray and blue striped T-shirt and black pants enters the scene from the right.", "type": "action", "weight": 3 }, { "content": "The man walks over to the table.", "type": "action", "weight": 3 }, { "content": "The man picks up a black piece of clothing, unfolds it, inspects it.", "type": "action", "weight": 3 }, { "content": "The man folds it twice and places it to the right of the original pile.", "type": "action", "weight": 3 }, { "content": "Afterwards, the man picks up a blue sock.", "type": "action", "weight": 3 }, { "content": "The man carefully folds it twice and places it atop the black clothing.", "type": "action", "weight": 3 } ] }, { "event": "Subsequently, the man exits the scene from the right. 
The clothes on the table are now divided into two piles: on the left, a pile of unfolded green clothes, and on the right, a folded stack of black clothing and blue socks.", "visual_elements": [ { "content": "The man exits the scene from the right.", "type": "action", "weight": 3 }, { "content": "The clothes on the table are divided into two piles.", "type": "attribute", "weight": 3 }, { "content": "The pile on the left consists of unfolded green clothing.", "type": "attribute", "weight": 3 }, { "content": "The pile on the right consists of folded black clothing and blue socks.", "type": "attribute", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0434", "video_path": "PerceptionTest/video_11226.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 34.97, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera overlooks a white table. On the table, there lies a white piece of paper with a pink highlighter pen placed on it. In the top left corner of the scene, there is a glass cup with a banana and a red pencil inserted inside. Also in the top left corner, a keyboard of a black laptop is partially visible. In the top right corner, there is a small cactus potted plant, and on the right side of the scene, there is a notebook with a patterned cover, and a white mobile phone is placed on top of it.\nA pair of hands enters the scene from the bottom left corner. The right hand picks up the pink highlighter pen. The left hand holds the white paper steady while the right hand moves the pen across the paper from left to right, eventually completing the word \"STAND\".\nAfter writing, the right hand puts down the pink highlighter pen. The right hand then picks up the paper with \"STAND\" written on it and shows it to the camera. Following that, the right hand places the paper back on the table. The left hand places itself on the left side of the paper to hold it steady, and the right hand picks up the pink highlighter pen again and presses it on the top right corner of the paper.\nTwo hands enter the scene from outside the left side, holding several red-and-white letter cards. The left hand holds a bundle of cards, while the right hand places the letter cards on the white paper one by one, spelling out the word \"STAND\". The two hands arrange each letter card neatly to ensure they match the order of the word on the paper. The two hands then leave the scene from the bottom.", "events": [ { "event": "At the beginning of the video, the camera overlooks a white table. On the table, there lies a white piece of paper with a pink highlighter pen placed on it. In the top left corner of the scene, there is a glass cup with a banana and a red pencil inserted inside. Also in the top left corner, a keyboard of a black laptop is partially visible. 
In the top right corner, there is a small cactus potted plant, and on the right side of the scene, there is a notebook with a patterned cover, and a white mobile phone is placed on top of it.", "visual_elements": [ { "content": "The camera overlooks a top-down view of a white table.", "type": "camera", "weight": 3 }, { "content": "On the table, there lies a white piece of paper.", "type": "attribute", "weight": 3 }, { "content": "A pink highlighter pen is placed on the paper.", "type": "attribute", "weight": 3 }, { "content": "In the top left corner of the scene,  there is a glass cup.", "type": "attribute", "weight": 2 }, { "content": "Inside the glass cup, a banana and a red pencil are inserted.", "type": "attribute", "weight": 1 }, { "content": "In the top left corner of the scene, a keyboard of a black laptop is partially visible.", "type": "attribute", "weight": 1 }, { "content": "In the top right corner of the scene,  there is a small cactus potted plant.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the scene, there is a notebook with a patterned cover.", "type": "attribute", "weight": 2 }, { "content": "On top of the notebook, a white mobile phone is placed.", "type": "attribute", "weight": 1 } ] }, { "event": "A pair of hands enters the scene from the bottom left corner. The right hand picks up the pink highlighter pen. The left hand holds the white paper steady while the right hand moves the pen across the paper from left to right, eventually completing the word \"STAND\".", "visual_elements": [ { "content": "A pair of hands enters the scene from the bottom left corner.", "type": "action", "weight": 3 }, { "content": "The right hand picks up the pink highlighter pen.", "type": "action", "weight": 3 }, { "content": "The left hand holds the white paper steady.", "type": "attribute", "weight": 2 }, { "content": "The right hand moves the pen across the paper.", "type": "action", "weight": 3 }, { "content": "The right hand completes the word \"STAND\" eventually.", "type": "attribute", "weight": 3 } ] }, { "event": "After writing, the right hand puts down the pink highlighter pen. The right hand then picks up the paper with \"STAND\" written on it and shows it to the camera. Following that, the right hand places the paper back on the table. The left hand places itself on the left side of the paper to hold it steady, and the right hand picks up the pink highlighter pen again and presses it on the top right corner of the paper.", "visual_elements": [ { "content": "The right hand puts down the pink highlighter pen.", "type": "action", "weight": 2 }, { "content": "The right hand picks up the paper with \"STAND\" written on it and shows it to the camera.", "type": "action", "weight": 3 }, { "content": "The right hand places the paper back on the table.", "type": "action", "weight": 2 }, { "content": "The left hand holds the paper steady on the left side.", "type": "attribute", "weight": 2 }, { "content": "The right hand picks up the pink highlighter pen and presses it on the top right corner of the paper.", "type": "action", "weight": 3 } ] }, { "event": "Two hands enter the scene from outside the left side, holding several red-and-white letter cards. The left hand holds a bundle of cards, while the right hand places the letter cards on the white paper one by one, spelling out the word \"STAND\". The two hands arrange each letter card neatly to ensure they match the order of the word on the paper. 
The two hands then leave the scene from the bottom.", "visual_elements": [ { "content": "Two hands enter the scene from outside the left side, holding several red-and-white letter cards.", "type": "action", "weight": 3 }, { "content": "The left hand holds a bundle of cards.", "type": "action", "weight": 2 }, { "content": "The right hand places the letter cards on the paper, spelling out \"STAND\".", "type": "action", "weight": 3 }, { "content": "The two hands arrange each letter card neatly to ensure they match the order of the word on the paper.", "type": "action", "weight": 3 }, { "content": "The two hands leave the scene from the bottom.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 24 }, { "index": "TUNA_0435", "video_path": "PerceptionTest/video_11254.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 11.73, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a wooden table with a piece of black fabric placed on it. On the left side of the scene, there is a transparent glass cup with a ballpoint pen lying horizontally on it. Behind the cup, there is an iron placed. A person dressed in dark clothing stands behind the table. A person's hands are on the black fabric on the table. The walls and corners of some furniture can be seen in the background.\nThe person uses both hands to lift the black fabric and then spreads it flat on the tabletop. The left hand presses down on one side of the fabric, while the right hand picks up the iron and moves it back and forth above the fabric. The electrical cord of the iron appears on the left side of the scene, showing the iron's movement.\nThe person puts the iron back in its place. The body leans forward, with the right hand still gripping the iron's handle. The left hand moves away from the right side of the scene, reaching towards the back of the camera.", "events": [ { "event": "At the beginning of the video, the camera focuses on a wooden table with a piece of black fabric placed on it. On the left side of the scene, there is a transparent glass cup with a ballpoint pen lying horizontally on it. Behind the cup, there is an iron placed. A person dressed in dark clothing stands behind the table. A person's hands are on the black fabric on the table. The walls and corners of some furniture can be seen in the background.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "On the table, a piece of black fabric is placed.", "type": "attribute", "weight": 3 }, { "content": "On the left side of the scene, there is a transparent glass cup.", "type": "attribute", "weight": 1 }, { "content": "A ballpoint pen lies horizontally on the glass cup.", "type": "attribute", "weight": 1 }, { "content": "Behind the cup, there is an iron placed.", "type": "attribute", "weight": 3 }, { "content": "A person dressed in dark clothing stands behind the table.", "type": "attribute", "weight": 3 }, { "content": "This person's hands are on the black fabric on the table.", "type": "attribute", "weight": 2 }, { "content": "The walls and corners of some furniture can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "The person uses both hands to lift the black fabric and then spreads it flat on the tabletop. 
The left hand presses down on one side of the fabric, while the right hand picks up the iron and moves it back and forth above the fabric. The electrical cord of the iron appears on the left side of the scene, showing the iron's movement.", "visual_elements": [ { "content": "The person uses both hands to lift the black fabric.", "type": "action", "weight": 3 }, { "content": "The person then spreads the black fabric flat on the tabletop.", "type": "action", "weight": 3 }, { "content": "The left hand presses down on one side of the fabric, while the right hand picks up the iron and moves it back and forth above the fabric.", "type": "action", "weight": 3 }, { "content": "The electrical cord of the iron appears on the left side of the scene, showing the iron's movement.", "type": "attribute", "weight": 2 } ] }, { "event": "The person puts the iron back in its place. The body leans forward, with the right hand still gripping the iron's handle. The left hand moves away from the right side of the scene, reaching towards the back of the camera.", "visual_elements": [ { "content": "The person puts the iron back in its place.", "type": "action", "weight": 3 }, { "content": "The body leans forward, with the right hand still on the iron's handle.", "type": "action", "weight": 2 }, { "content": "The left hand moves away from the right side, reaching towards the back of the camera.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0436", "video_path": "PerceptionTest/video_11301.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 16.05, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera overlooks a table. A wooden table is covered with a black rectangular tablecloth. On the left side of the screen, there is a book with an orange cover titled \"SHARPENING ACADEMIC SKILLS.\" To the right of the book, three upside-down paper cups are arranged at equal intervals. A person wearing a light-colored long-sleeved shirt has their left hand resting on the table, with the right hand about to touch the first cup on the rightmost side of the screen.\nStarting from the cup on the rightmost side, the person uses their right hand to pick up each of the three cups in sequence and then places them back in their original positions. Underneath the middle cup, there is an orange ball. Afterwards, the person picks up the book on the rightmost side of the screen. With the left hand holding the book steady, the right hand quickly flips through the pages. Some pages have white sheets of paper inserted between them. After finishing flipping through the book, the person returns it to its original position. At the end of the video, the right hand reaches towards the first cup on the rightmost side. The camera follows the person's movements left and right.", "events": [ { "event": "At the beginning of the video, the camera overlooks a table. A wooden table is covered with a black rectangular tablecloth. On the left side of the screen, there is a book with an orange cover titled \"SHARPENING ACADEMIC SKILLS.\" To the right of the book, three upside-down paper cups are arranged at equal intervals. 
A person wearing a light-colored long-sleeved shirt has their left hand resting on the table, with the right hand about to touch the first cup on the rightmost side of the screen.", "visual_elements": [ { "content": "The camera overlooks a table.", "type": "camera", "weight": 3 }, { "content": "A wooden table is covered with a black rectangular tablecloth.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the scene, there is a book with an orange cover.", "type": "attribute", "weight": 3 }, { "content": "The title of the book is \"SHARPENING ACADEMIC SKILLS\".", "type": "attribute", "weight": 2 }, { "content": "To the right of the book, three upside-down paper cups are arranged at equal intervals.", "type": "attribute", "weight": 3 }, { "content": "A person wearing a light-colored long-sleeved shirt has their left hand resting on the table.", "type": "attribute", "weight": 2 }, { "content": "The right hand is about to touch the first cup on the rightmost side of the scene.", "type": "attribute", "weight": 2 } ] }, { "event": "Starting from the cup on the rightmost side, the person uses their right hand to pick up each of the three cups in sequence and then places them back in their original positions. Underneath the middle cup, there is an orange ball. Afterwards, the person picks up the book on the rightmost side of the screen. With the left hand holding the book steady, the right hand quickly flips through the pages. Some pages have white sheets of paper inserted between them. After finishing flipping through the book, the person returns it to its original position. At the end of the video, the right hand reaches towards the first cup on the rightmost side. The camera follows the person's movements left and right.", "visual_elements": [ { "content": "Starting from the cup on the rightmost side, the person uses their right hand to lift each of the three cups in sequence and then place them back in their original positions.", "type": "action", "weight": 3 }, { "content": "Beneath the middle cup, there is a small orange ball.", "type": "attribute", "weight": 3 }, { "content": "Subsequently, the person picks up the book on the rightmost side of the scene.", "type": "action", "weight": 3 }, { "content": "With the left hand holding the book steady, the right hand quickly flips through the pages.", "type": "action", "weight": 3 }, { "content": "Some pages have white sheets of paper inserted between them.", "type": "attribute", "weight": 2 }, { "content": "After finishing flipping through the book, the person returns it to its original position.", "type": "action", "weight": 3 }, { "content": "At the end of the video, the right hand reaches towards the first cup on the rightmost side.", "type": "action", "weight": 3 }, { "content": "The camera follows the person's movements left and right.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0437", "video_path": "PerceptionTest/video_11302.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 35.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "The camera focuses on a black barstool situated in an indoor environment. The background wall is a combination of white and light green. Inside the room with the light green wall, a white cabinet enters the scene.The floor is covered with patterned white tiles. 
Next to the stool and leaning against the wall, there is a black suitcase with two zippered pockets.\nA person holding a black baseball cap enters the scene from the right side. The hand moves to show the front and side of the cap, which has a golden logo on it. The hand holding the cap exits the scene from the right side.\nA black short-sleeved shirt enters the scene from above. The shirt is adorned with red and gray letters and a white icon, with the red letters \"AIR\" printed over the gray letters \"MAX.\" The black short-sleeved shirt exits the scene from above.\nA black hoodie enters the scene from above. The hoodie boasts white letters \"TM\" and \"SPORTSWEAR TM\". The black hoodie exits the scene from above.\nA person holding a black hair straightener enters the scene from the right side. The handle and cord of the hair straightener are pink. The hand holding the hair straightener exits the scene from the right side.\nA hand places the black baseball cap, black hoodie, and hair straightener onto the black barstool in sequence. After completing these actions, the hand exits the scene from above.", "events": [ { "event": "The camera focuses on a black barstool situated in an indoor environment. The background wall is a combination of white and light green. Inside the room with the light green wall, a white cabinet enters the scene. The floor is covered with patterned white tiles. Next to the stool and leaning against the wall, there is a black suitcase with two zippered pockets.", "visual_elements": [ { "content": "The camera focuses on a black barstool.", "type": "camera", "weight": 3 }, { "content": "The stool is situated in an indoor environment.", "type": "attribute", "weight": 2 }, { "content": "The background wall is a combination of white and light green.", "type": "scene", "weight": 2 }, { "content": "Inside the room with the light green wall, a white cabinet enters the scene.", "type": "scene", "weight": 1 }, { "content": "The floor is covered with patterned white tiles.", "type": "scene", "weight": 2 }, { "content": "Next to the stool and leaning against the wall, there is a black suitcase.", "type": "scene", "weight": 2 }, { "content": "The suitcase features two zippered pockets.", "type": "attribute", "weight": 1 } ] }, { "event": "A person holding a black baseball cap enters the scene from the right side. The hand moves to show the front and side of the cap, which has a golden logo on it. The hand holding the cap exits the scene from the right side.", "visual_elements": [ { "content": "A person holding a black baseball cap enters the scene from the right side.", "type": "action", "weight": 3 }, { "content": "The hand moves to show the front and side of the cap.", "type": "action", "weight": 3 }, { "content": "A golden logo is emblazoned on the cap.", "type": "attribute", "weight": 2 }, { "content": "The hand holding the cap exits the scene from the right side.", "type": "action", "weight": 3 } ] }, { "event": "A black short-sleeved shirt enters the scene from above. 
The shirt is adorned with red and gray letters and a white icon, with the red letters \"AIR\" printed over the gray letters \"MAX.\" The black short-sleeved shirt exits the scene from above.", "visual_elements": [ { "content": "A black short-sleeved shirt enters the scene from above.", "type": "action", "weight": 3 }, { "content": "The shirt is adorned with red and gray letters and a white icon.", "type": "attribute", "weight": 3 }, { "content": "The red letters \"AIR\" are imprinted over the gray letters \"MAX\".", "type": "attribute", "weight": 2 }, { "content": "The black short-sleeved shirt exits the scene from above.", "type": "action", "weight": 3 } ] }, { "event": "A black hoodie enters the scene from above. The hoodie boasts white letters \"TM\" and \"SPORTSWEAR TM\". The black hoodie exits the scene from above.", "visual_elements": [ { "content": "A black hoodie enters the scene from above.", "type": "action", "weight": 3 }, { "content": "The hoodie boasts white letters \"TM\" and \"SPORTSWEAR TM\".", "type": "attribute", "weight": 2 }, { "content": "The black hoodie exits the scene from above.", "type": "attribute", "weight": 3 } ] }, { "event": "A person holding a black hair straightener enters the scene from the right side. The handle and cord of the hair straightener are pink. The hand holding the hair straightener exits the scene from the right side.", "visual_elements": [ { "content": "A person holding a black hair straightener enters the scene from the right side.", "type": "action", "weight": 3 }, { "content": "The handle and cord of the hair straightener are pink.", "type": "attribute", "weight": 2 }, { "content": "The hand holding the straightener exits the scene from the right side.", "type": "action", "weight": 3 } ] }, { "event": "A hand places the black baseball cap, black hoodie, and hair straightener onto the black barstool in sequence. After completing these actions, the hand exits the scene from above.", "visual_elements": [ { "content": "A hand places the black baseball cap, black hoodie, and hair straightener onto the black barstool in sequence.", "type": "action", "weight": 3 }, { "content": "After completing these actions, the hand exits the scene from above.", "type": "action", "weight": 2 } ] } ], "n_events": 6, "n_elements": 23 }, { "index": "TUNA_0438", "video_path": "PerceptionTest/video_11342.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 24.67, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a table. On the left side of the table, there is a black desk lamp. In front of the lamp, there is a transparent glass cup. The black cord of the lamp is partially wrapped around the lamp pole and partially coiled on the tabletop. The black plug of the lamp is inserted into a white power strip. The background features a half-open white door and a part of a gray sofa. A person wearing a gray-striped long-sleeved shirt stands beside the table, with only his upper torso visible in the scene.\nThe person first leans forward and reaches out his left hand to press the button on the desk lamp. The lamp turns on. The person then begins to flip through a book in front of him. After stopping flipping the book, the person leans forward again and presses the button on the desk lamp with the left hand. The lamp turns off. 
The person places the left hand on the white outlet to stabilize it and uses the right hand to simulate unplugging the lamp's cord.\nThe person straightens up and holds an orange click pen in the right hand, constantly tapping the button of the pen on the tabletop. Finally, after putting down the pen, the person once again reaches out the left hand to press the switch button on the desk lamp, and the lamp turns on. The person sidesteps closer to the camera, and the left hand exits the scene from the right side.", "events": [ { "event": "At the beginning of the video, the camera focuses on a table. On the left side of the table, there is a black desk lamp. In front of the lamp, there is a transparent glass cup. The black cord of the lamp is partially wrapped around the lamp pole and partially coiled on the tabletop. The black plug of the lamp is inserted into a white power strip. The background features a half-open white door and a part of a gray sofa. A person wearing a gray-striped long-sleeved shirt stands beside the table, with only his upper torso visible in the scene.", "visual_elements": [ { "content": "The camera focuses on a table.", "type": "camera", "weight": 3 }, { "content": "On the left side of the table, there is a black desk lamp.", "type": "attribute", "weight": 3 }, { "content": "In front of the lamp, there is a transparent glass cup.", "type": "attribute", "weight": 1 }, { "content": "The black cord of the lamp is partially wrapped around the lamp pole and partially coiled on the tabletop.", "type": "attribute", "weight": 2 }, { "content": "The black plug of the lamp is inserted into a white power strip.", "type": "attribute", "weight": 2 }, { "content": "The background features a half-open white door and a part of a gray sofa.", "type": "scene", "weight": 2 }, { "content": "A person wearing a gray-striped long-sleeved shirt stands beside the table.", "type": "attribute", "weight": 3 }, { "content": "Only the person's upper torso is visible in the scene.", "type": "attribute", "weight": 2 } ] }, { "event": "The person first leans forward and reaches out his left hand to press the button on the desk lamp. The lamp turns on. The person then begins to flip through a book in front of him. After stopping flipping the book, the person leans forward again and presses the button on the desk lamp with the left hand. The lamp turns off. The person places the left hand on the white outlet to stabilize it and uses the right hand to simulate unplugging the lamp's cord.", "visual_elements": [ { "content": "The person first leans forward and reaches out his left hand to press the button on the desk lamp.", "type": "action", "weight": 3 }, { "content": "The lamp turns on.", "type": "attribute", "weight": 3 }, { "content": "The person begins to flip through a book in front of him.", "type": "action", "weight": 3 }, { "content": "After he stops flipping through the book, the person leans forward again and presses the button on the desk lamp with the left hand.", "type": "action", "weight": 3 }, { "content": "The lamp turns off.", "type": "attribute", "weight": 3 }, { "content": "The person places the left hand on the white outlet to stabilize it.", "type": "attribute", "weight": 2 }, { "content": "The person simulates unplugging the lamp's cord with the right hand.", "type": "action", "weight": 3 } ] }, { "event": "The person straightens up and holds an orange click pen in the right hand, constantly tapping the button of the pen on the tabletop. 
Finally, after putting down the pen, the person once again reaches out the left hand to press the switch button on the desk lamp, and the lamp turns on. The person sidesteps closer to the camera, and the left hand exits the scene from the right side.", "visual_elements": [ { "content": "The person straightens up and holds an orange click pen in the right hand.", "type": "attribute", "weight": 3 }, { "content": "The person constantly taps the button of the pen on the tabletop.", "type": "action", "weight": 3 }, { "content": "After putting down the pen, the person once again reaches out the left hand to press the switch button on the desk lamp.", "type": "action", "weight": 3 }, { "content": "The lamp turns on.", "type": "attribute", "weight": 3 }, { "content": "The person sidesteps closer to the camera, and the left hand exits the scene from the right side.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0439", "video_path": "PerceptionTest/video_11359.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 29.57, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a small wooden table. A woman dressed in a shirt with black and white patterns and denim shorts sits beside the table, her hands resting on either side of it. On the table, to the right of the woman's left hand, there is a salt shaker and a bowl containing eggs. To the right of the eggs, there is a white empty bowl. To the right of the empty bowl, there lies a fork. Additionally, to the left of the woman's right hand on the table, there are three colored letter magnets: a green \"X\", a yellow \"O\", and a blue \"B\". On the other half of the table, farther from the woman, there is a plate adorned with colorful patterns, a carton of milk, and a glass.\nThe woman first shuffles the three letter magnets on the table and then arranges \"B\", \"O\", and \"X\" in a sequence extending upwards to the left, based on her own right-to-left perspective.\nNext, the woman uses her right hand to pick up an egg from the bowl, while her left hand holds the colorful plate. She gently taps the egg against the plate with her right hand and cracks it into the white empty bowl. She places the eggshell on the table and then picks up and shakes the salt shaker, sprinkling salt into the bowl with the beaten egg. With her left hand supporting the bowl, the woman takes the fork with her right hand and stirs it in the bowl until the egg mixture is evenly blended. After stirring, she places the fork back on the table and returns her hands to their original positions on either side. At the end of the video, the woman's right hand extends from the left side of the scene.", "events": [ { "event": "At the beginning of the video, the camera focuses on a small wooden table. A woman dressed in a shirt with black and white patterns and denim shorts sits beside the table, her hands resting on either side of it. On the table, to the right of the woman's left hand, there is a salt shaker and a bowl containing eggs. To the right of the eggs, there is a white empty bowl. To the right of the empty bowl, there lies a fork. Additionally, to the left of the woman's right hand on the table, there are three colored letter magnets: a green \"X\", a yellow \"O\", and a blue \"B\". 
On the other half of the table, farther from the woman, there is a plate adorned with colorful patterns, a carton of milk, and a glass.", "visual_elements": [ { "content": "The camera focuses on a small wooden table.", "type": "camera", "weight": 3 }, { "content": "A woman dressed in a shirt with black and white patterns and denim shorts sits beside the table.", "type": "attribute", "weight": 3 }, { "content": "The woman's hands rest on either side of the table.", "type": "attribute", "weight": 1 }, { "content": "On the table, to the right of the woman's left hand, there is a salt shaker and a bowl filled with eggs.", "type": "attribute", "weight": 2 }, { "content": "To the right of the eggs, there is a white empty bowl.", "type": "attribute", "weight": 2 }, { "content": "To the right of the empty bowl, there lies a fork.", "type": "attribute", "weight": 1 }, { "content": "On the table, to the left of the woman's right hand, there are three colored letter magnets: a green \"X\", a yellow \"O\", and a blue \"B\".", "type": "attribute", "weight": 2 }, { "content": "On the other half of the table, farther from the woman, there is a plate adorned with colorful patterns, a carton of milk, and a glass.", "type": "attribute", "weight": 2 } ] }, { "event": "The woman first shuffles the three letter magnets on the table and then arranges \"B\", \"O\", and \"X\" in a sequence extending upwards to the left, based on her own right-to-left perspective.", "visual_elements": [ { "content": "The woman first shuffles the three letter magnets on the table.", "type": "action", "weight": 3 }, { "content": "Then, the woman arranges \"B\", \"O\", and \"X\" in a sequence extending upwards to the left, based on her own right-to-left perspective.", "type": "action", "weight": 3 } ] }, { "event": "Next, the woman uses her right hand to pick up an egg from the bowl, while her left hand holds the colorful plate. She gently taps the egg against the plate with her right hand and cracks it into the white empty bowl. She places the eggshell on the table and then picks up and shakes the salt shaker, sprinkling salt into the bowl with the beaten egg. With her left hand supporting the bowl, the woman takes the fork with her right hand and stirs it in the bowl until the egg mixture is evenly blended. After stirring, she places the fork back on the table and returns her hands to their original positions on either side. 
At the end of the video, the woman's right hand extends from the left side of the scene.", "visual_elements": [ { "content": "The woman uses her right hand to pick up an egg from the bowl.", "type": "action", "weight": 2 }, { "content": "With her left hand holding the colorful plate, the woman gently taps the egg against it with her right hand.", "type": "action", "weight": 3 }, { "content": "The woman cracks the egg into the white empty bowl.", "type": "action", "weight": 3 }, { "content": "The woman places the eggshell on the table.", "type": "action", "weight": 2 }, { "content": "The woman picks up and shakes the salt shaker, sprinkling salt into the bowl with the beaten egg.", "type": "action", "weight": 3 }, { "content": "The woman supports the bowl with her left hand.", "type": "action", "weight": 2 }, { "content": "The woman takes the fork with her right hand and stirs it in the bowl until the egg mixture is evenly blended.", "type": "action", "weight": 3 }, { "content": "After stirring, the woman places the fork back on the table and returns her hands to their original positions on either side.", "type": "action", "weight": 2 }, { "content": "At the end of the video, the woman's right hand extends from the left side of the scene.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0440", "video_path": "PerceptionTest/video_11360.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 18.96, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a wooden table. On the table, there is a stack of books with a white plastic cutting board placed diagonally atop them. To the left of the books on the table, there is a small bottle with a yellow label and a fork. The left wall is adorned with posters and schedules. Behind the books on the table, there are other books and documents. The video is handheld, with the right side of the filmer's body appearing on the right side of the scene.\nThe filmer, dressed in a dark coat, first extends their right hand to pick up the white cutting board placed above the books. They lean the cutting board against the stack of books on the table. Next, the filmer picks up a thick book with a red cover from the right side of the table. They place the thick book on the side of the cutting board closest to the camera.\nThe filmer then leans forward and picks up an apple from the left side of the table. He places the apple at the highest point of the cutting board and lets it slide down freely. However, the apple's movement is eventually stopped by the thick book at the end of the cutting board. The filmer then picks up the apple again and places it back at the highest point of the cutting board. The video stops after the filmer repeats this action for the third time, with the apple once again being stopped by the thick book at the end of the cutting board. Throughout the filming process, the camera shakes following the filmer's movements.", "events": [ { "event": "At the beginning of the video, the camera focuses on a wooden table. On the table, there is a stack of books with a white plastic cutting board placed diagonally atop them. To the left of the books on the table, there is a small bottle with a yellow label and a fork. The left wall is adorned with posters and schedules. Behind the books on the table, there are other books and documents. 
The video is handheld, with the right side of the filmer's body appearing on the right side of the scene.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "On the table, there is a stack of books.", "type": "attribute", "weight": 3 }, { "content": "A white plastic cutting board is placed diagonally atop the books.", "type": "attribute", "weight": 3 }, { "content": "To the left of the books on the table, there is a small bottle with a yellow label and a fork.", "type": "attribute", "weight": 2 }, { "content": "The left wall is adorned with posters and schedules.", "type": "scene", "weight": 1 }, { "content": "Behind the books on the table, there are other books and documents.", "type": "scene", "weight": 1 }, { "content": "The video is handheld.", "type": "camera", "weight": 3 }, { "content": "The right side of the filmer's body appears on the right side of the scene.", "type": "attribute", "weight": 3 } ] }, { "event": "The filmer, dressed in a dark coat, first extends their right hand to pick up the white cutting board placed above the books. They lean the cutting board against the stack of books on the table. Next, the filmer picks up a thick book with a red cover from the right side of the table. They place the thick book on the side of the cutting board closest to the camera.", "visual_elements": [ { "content": "The filmer is dressed in a dark coat.", "type": "attribute", "weight": 2 }, { "content": "The filmer first extends their right hand to pick up the white cutting board placed above the books.", "type": "action", "weight": 3 }, { "content": "The person leans the cutting board against the stack of books on the table.", "type": "action", "weight": 3 }, { "content": "The person picks up a thick book with a red cover from the right side of the table.", "type": "action", "weight": 3 }, { "content": "The person places the thick book on the side of the cutting board closest to the camera.", "type": "action", "weight": 3 } ] }, { "event": "The filmer then leans forward and picks up an apple from the left side of the table. He places the apple at the highest point of the cutting board and lets it slide down freely. However, the apple's movement is eventually stopped by the thick book at the end of the cutting board. The filmer then picks up the apple again and places it back at the highest point of the cutting board. The video stops after the filmer repeats this action for the third time, with the apple once again being stopped by the thick book at the end of the cutting board. 
Throughout the filming process, the camera shakes following the filmer's movements.", "visual_elements": [ { "content": "The person leans forward and picks up an apple from the left side of the table.", "type": "action", "weight": 3 }, { "content": "The person places the apple at the highest point of the cutting board and lets it slide down freely.", "type": "action", "weight": 3 }, { "content": "The apple's movement is eventually stopped by the thick book at the end of the cutting board.", "type": "action", "weight": 3 }, { "content": "The person picks up the apple again and places it back at the highest point of the cutting board.", "type": "action", "weight": 3 }, { "content": "The video stops after the filmer repeats this action for the third time.", "type": "camera", "weight": 3 }, { "content": "Throughout the filming process, the camera shakes following the filmer's movements.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0441", "video_path": "PerceptionTest/video_11368.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 22.5, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a wooden table with several sheets of white paper laid out on its surface. The background is a yellow wall with some textures, and the bottom of a black television hanging on the wall is visible.\nA pair of hands enters the scene from the bottom right corner. The nails on these hands are painted red. The right hand holds a red pen. The left hand supports the white paper while the right hand, holding the paper, draws a rectangle on it first, with red borders. Then, below the rectangle, a circle is drawn. After completing the circle, the pen in the right hand is swapped to the left hand, and the left hand, holding the pen, exits the scene. The right hand picks up the paper and shows it to the camera.\nThe right hand places the paper back on the table. From the right side of the scene, the right hand takes out a black poster and leans it against the wall. The poster has a colorful circular pattern in the center, surrounded by gold letters. The right hand then takes out another similar poster from the right side of the scene. This poster has a colorful rectangular pattern in the center. The positions of the two posters correspond to the patterns drawn on the white paper.", "events": [ { "event": "At the beginning of the video, the camera focuses on a wooden table with several sheets of white paper laid out on its surface. The background is a yellow wall with some textures, and the bottom of a black television hanging on the wall is visible.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "Several sheets of white paper are laid out on the table.", "type": "attribute", "weight": 3 }, { "content": "The background is a yellow wall with some textures.", "type": "scene", "weight": 2 }, { "content": "The bottom part of a black television hanging on the wall is visible in the shot.", "type": "scene", "weight": 1 } ] }, { "event": "A pair of hands enters the scene from the bottom right corner. The nails on these hands are painted red. The right hand holds a red pen. The left hand supports the white paper while the right hand, holding the paper, draws a rectangle on it first, with red borders. Then, below the rectangle, a circle is drawn. 
After completing the circle, the pen in the right hand is swapped to the left hand, and the left hand, holding the pen, exits the scene. The right hand picks up the paper and shows it to the camera.", "visual_elements": [ { "content": "A pair of hands enters the scene from the bottom right corner.", "type": "action", "weight": 3 }, { "content": "The nails on these hands are painted red.", "type": "attribute", "weight": 2 }, { "content": "The right hand holds a red pen.", "type": "attribute", "weight": 2 }, { "content": "The left hand supports a sheet of white paper.", "type": "attribute", "weight": 2 }, { "content": "The right hand, holding the paper, first draws a rectangle on it.", "type": "action", "weight": 3 }, { "content": "The borders of the rectangle are red.", "type": "attribute", "weight": 3 }, { "content": "The right hand continues to draw a circle below the rectangle.", "type": "action", "weight": 3 }, { "content": "After completing the circle, the pen is transferred from the right hand to the left hand.", "type": "action", "weight": 2 }, { "content": "The left hand, holding the pen, exits the scene.", "type": "action", "weight": 2 }, { "content": "The right hand picks up the paper and shows it to the camera.", "type": "action", "weight": 3 } ] }, { "event": "The right hand places the paper back on the table. From the right side of the scene, the right hand takes out a black poster and leans it against the wall. The poster has a colorful circular pattern in the center, surrounded by gold letters. The right hand then takes out another similar poster from the right side of the scene. This poster has a colorful rectangular pattern in the center. The positions of the two posters correspond to the patterns drawn on the white paper.", "visual_elements": [ { "content": "The right hand places the paper back on the table.", "type": "action", "weight": 2 }, { "content": "The right hand takes out a black poster from the right side of the scene and leans it against the wall.", "type": "action", "weight": 3 }, { "content": "The poster has a colorful circular pattern in the middle, surrounded by gold letters.", "type": "attribute", "weight": 2 }, { "content": "The right hand takes out another similar poster from the right side of the scene.", "type": "action", "weight": 2 }, { "content": "This poster has a colorful rectangular pattern in the middle.", "type": "attribute", "weight": 2 }, { "content": "The positions of the two posters correspond to the patterns drawn on the white paper.", "type": "attribute", "weight": 3 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0442", "video_path": "PerceptionTest/video_11379.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 32.19, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the start of the video, the camera focuses on a table covered with a festive-patterned tablecloth. A person wearing a green T-shirt is seated next to the table. On the table, near the camera on the left, there is a transparent plastic container filled with eggs, and to the right, there is a striped shoe. Behind the eggs, there is an empty glass bowl, and to the right front of the bowl, an apple and a box of tea bags. On the back of the table, there is also a blue seasoning bottle, with its white plastic lid open. The background features a white wall and some wires.\nThe person sitting at the table reaches into the container of eggs with his right hand and takes out an egg. 
Then, the person repeatedly taps the egg against the edge of the glass bowl until it cracks. Next, they use both hands to break the eggshell, allowing the egg yolk and white to flow into the glass bowl. The person then reaches into the blue seasoning container with their right hand, using their thumb and forefinger to take a pinch of seasoning and add it to the egg mixture. Using their left hand, they pick up the fork beside the glass bowl and begin stirring the egg mixture. After stopping the stirring, the person moves the tea bag from one side of the apple to the other with their right hand.", "events": [ { "event": "At the start of the video, the camera focuses on a table covered with a festive-patterned tablecloth. A person wearing a green T-shirt is seated next to the table. On the table, near the camera on the left, there is a transparent plastic container filled with eggs, and to the right, there is a striped shoe. Behind the eggs, there is an empty glass bowl, and to the right front of the bowl, an apple and a box of tea bags. On the back of the table, there is also a blue seasoning bottle, with its white plastic lid open. The background features a white wall and some wires.", "visual_elements": [ { "content": "The camera focuses on a table covered with a festive-patterned tablecloth.", "type": "camera", "weight": 3 }, { "content": "A person wearing a green T-shirt is sitting next to the table.", "type": "attribute", "weight": 3 }, { "content": "On the left side of the table, near the camera, there is a transparent plastic container filled with eggs.", "type": "attribute", "weight": 3 }, { "content": "On the right side of the table, near the camera, there is a striped shoe.", "type": "attribute", "weight": 2 }, { "content": "Behind the eggs, there is an empty glass bowl.", "type": "attribute", "weight": 3 }, { "content": "To the right front of the glass bowl, there is an apple and a box of tea bags.", "type": "attribute", "weight": 2 }, { "content": "On the back of the table, there is also a blue seasoning bottle.", "type": "attribute", "weight": 3 }, { "content": "The white plastic lid of the seasoning bottle is open.", "type": "attribute", "weight": 1 }, { "content": "The background features a white wall and some wires.", "type": "scene", "weight": 1 } ] }, { "event": "The person sitting at the table reaches into the container of eggs with his right hand and takes out an egg. Then, the person repeatedly taps the egg against the edge of the glass bowl until it cracks. Next, they use both hands to break the eggshell, allowing the egg yolk and white to flow into the glass bowl. The person then reaches into the blue seasoning container with their right hand, using their thumb and forefinger to take a pinch of seasoning and add it to the egg mixture. Using their left hand, they pick up the fork beside the glass bowl and begin stirring the egg mixture. 
After stopping the stirring, the person moves the tea bag from one side of the apple to the other with their right hand.", "visual_elements": [ { "content": "The person sitting at the table reaches into the container of eggs with his right hand and takes out an egg.", "type": "action", "weight": 3 }, { "content": "The person repeatedly taps the egg against the edge of the glass bowl until it cracks.", "type": "action", "weight": 3 }, { "content": "The person uses both hands to break the eggshell, allowing the egg yolk and white to flow into the glass bowl.", "type": "action", "weight": 3 }, { "content": "The person reaches into the blue seasoning container with their right hand, pinching a bit of seasoning with their thumb and forefinger, and adds it to the egg mixture.", "type": "action", "weight": 3 }, { "content": "The person picks up the fork next to the glass bowl with their left hand and begins to stir the egg mixture.", "type": "action", "weight": 3 }, { "content": "After stopping the stirring, the person moves the tea bag from one side of the apple to the other with their right hand.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0443", "video_path": "PerceptionTest/video_11411.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 24.34, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focused on a wooden table. On the table, there is a black cup with a spoon inserted into it, and a blue pen resting on the edge of the cup, with the pen tip resting on the cup's handle. The background features a white wall with a decorative painting partially in the scene. The scene of the painting is made of dark wood.\nThen, a hand enters the scene from the right, holding a cigarette box with Italian text and colorful patterns along with a white label. The hand holds the cigarette box for a few seconds in front of the camera before leaving the scene on the right side.\nNext, a hand enters the scene from the right, this time holding a black smartphone with the screen facing the camera. The hand holds the phone for a few seconds in front of the camera before leaving the scene on the right side.\nThen, a hand enters the scene from the right, holding a book with a green and white cover. The cover of the book has some text and a photo of a person. The hand holds the book for a few seconds in front of the camera before leaving the scene on the right side.\nLater, a hand holding the book enters the scene from the right and leaves again. A hand holding the phone enters the scene from the right and leaves again. The hand holding the book enters the scene once more from the right and leaves again.", "events": [ { "event": "The video begins with the camera focused on a wooden table. On the table, there is a black cup with a spoon inserted into it, and a blue pen resting on the edge of the cup, with the pen tip resting on the cup's handle. The background features a white wall with a decorative painting partially in the scene. 
The frame of the painting is made of dark wood.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "On the table, there is a black cup.", "type": "attribute", "weight": 3 }, { "content": "There is a spoon inserted in the cup.", "type": "attribute", "weight": 2 }, { "content": "There is a blue pen resting on the edge of the cup, with the pen tip resting on the cup's handle.", "type": "attribute", "weight": 2 }, { "content": "The background is a white wall.", "type": "scene", "weight": 2 }, { "content": "The decorative painting on the wall is partially in the scene.", "type": "scene", "weight": 1 }, { "content": "The frame of the painting is made of dark wood.", "type": "scene", "weight": 1 } ] }, { "event": "Then, a hand enters the scene from the right, holding a cigarette box with Italian text and colorful patterns along with a white label. The hand holds the cigarette box for a few seconds in front of the camera before leaving the scene on the right side.", "visual_elements": [ { "content": "A hand enters the scene from the right.", "type": "action", "weight": 3 }, { "content": "The hand is holding a cigarette box with Italian text.", "type": "attribute", "weight": 3 }, { "content": "The cigarette box has colorful patterns and a white label.", "type": "attribute", "weight": 2 }, { "content": "The hand holds the cigarette box in front of the camera for a few seconds before leaving the scene.", "type": "action", "weight": 3 } ] }, { "event": "Next, a hand enters the scene from the right, this time holding a black smartphone with the screen facing the camera. The hand holds the phone for a few seconds in front of the camera before leaving the scene on the right side.", "visual_elements": [ { "content": "A hand enters the scene again from the right.", "type": "action", "weight": 3 }, { "content": "This time, the hand is holding a black smartphone.", "type": "attribute", "weight": 3 }, { "content": "The smartphone screen is facing the camera.", "type": "attribute", "weight": 2 }, { "content": "The hand holds the phone in front of the camera for a few seconds before leaving the scene.", "type": "action", "weight": 3 } ] }, { "event": "Then, a hand enters the scene from the right, holding a book with a green and white cover. The cover of the book has some text and a photo of a person. The hand holds the book for a few seconds in front of the camera before leaving the scene on the right side.", "visual_elements": [ { "content": "A hand enters the scene from the right again.", "type": "action", "weight": 3 }, { "content": "The hand is holding a book with a green and white cover.", "type": "attribute", "weight": 3 }, { "content": "The book cover has some text and a photo of a person.", "type": "attribute", "weight": 2 }, { "content": "The hand holds the book in front of the camera for a few seconds before leaving the scene.", "type": "action", "weight": 3 } ] }, { "event": "Later, a hand holding the book enters the scene from the right and leaves again. A hand holding the phone enters the scene from the right and leaves again. 
The hand holding the book enters the scene once more from the right and leaves again.", "visual_elements": [ { "content": "The hand holding the book enters the scene from the right and leaves again.", "type": "action", "weight": 3 }, { "content": "The hand holding the phone enters the scene from the right and leaves again.", "type": "action", "weight": 3 }, { "content": "The hand holding the book enters the scene from the right again and leaves.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 22 }, { "index": "TUNA_0444", "video_path": "PerceptionTest/video_11428.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 29.83, "resolution": { "width": 1920, "height": 1080 }, "caption": "The camera gradually focuses on a white round table, with a blue edge. On the far left of the table, there is a red pen. To the right of the red pen, there is a bowl filled with white particles, and a spoon is placed inside the bowl. To the right of the bowl are a cup and an apple. The background features a white wall.\nA man wearing a green T-shirt and dark pants walks in from the left side of the scene and stands in front of the table. He first extends both hands and claps twice in front of himself. The man then picks up the red pen on the table with his right hand, taps the table three times with the pen, and places it back in its original position.\nThe man picks up the spoon from the bowl with his right hand, scoops up the white particles, and pours them into the nearby cup. This action is repeated three times. Then, he places the spoon down and picks up the apple with his left hand. He tosses the apple to his right hand, and the apple is repeatedly tossed between his left and right hands before finally returning to the left hand. He places the apple back on the table and exits the scene from the left side.", "events": [ { "event": "The camera gradually focuses on a white round table, with a blue edge. On the far left of the table, there is a red pen. To the right of the red pen, there is a bowl filled with white particles, and a spoon is placed inside the bowl. To the right of the bowl are a cup and an apple. The background features a white wall.", "visual_elements": [ { "content": "The camera gradually focuses on a white round table.", "type": "camera", "weight": 3 }, { "content": "The edge of the table is blue.", "type": "attribute", "weight": 1 }, { "content": "On the far left of the table, there is a red pen.", "type": "attribute", "weight": 3 }, { "content": "To the right of the red pen, there is a bowl filled with white particles.", "type": "attribute", "weight": 3 }, { "content": "There is a spoon placed inside the bowl filled with white particles.", "type": "attribute", "weight": 2 }, { "content": "To the right of the bowl are a cup and an apple.", "type": "attribute", "weight": 3 }, { "content": "The background is a white wall.", "type": "scene", "weight": 1 } ] }, { "event": "A man wearing a green T-shirt and dark pants walks in from the left side of the scene and stands in front of the table. He first extends both hands and claps twice in front of himself. 
The man then picks up the red pen on the table with his right hand, taps the table three times with the pen, and places it back in its original position.", "visual_elements": [ { "content": "A man wearing a green T-shirt and dark pants walks in from the left side of the scene and stands in front of the table.", "type": "action", "weight": 3 }, { "content": "The man first extends both hands and claps twice in front of himself.", "type": "action", "weight": 3 }, { "content": "The man picks up the red pen on the table with his right hand.", "type": "action", "weight": 2 }, { "content": "The man taps the table three times with the pen and then puts it back in its place.", "type": "action", "weight": 3 } ] }, { "event": "The man picks up the spoon from the bowl with his right hand, scoops up the white particles, and pours them into the nearby cup. This action is repeated three times. Then, he places the spoon down and picks up the apple with his left hand. He tosses the apple to his right hand, and the apple is repeatedly tossed between his left and right hands before finally returning to the left hand. He places the apple back on the table and exits the scene from the left side.", "visual_elements": [ { "content": "The man picks up the spoon from the bowl with his right hand.", "type": "action", "weight": 2 }, { "content": "The man scoops the white particles from the bowl with the spoon and pours them into the nearby cup.", "type": "action", "weight": 3 }, { "content": "The above action is repeated three times.", "type": "action", "weight": 3 }, { "content": "The man places the spoon down and picks up the apple with his left hand.", "type": "action", "weight": 3 }, { "content": "The man tosses the apple to his right hand, and it is repeatedly tossed between his left and right hands before finally returning to the left hand.", "type": "action", "weight": 3 }, { "content": "The left hand places the apple back on the table.", "type": "action", "weight": 3 }, { "content": "The man exits the scene from the left side.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0445", "video_path": "PerceptionTest/video_1144.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 21.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a white table. In the top right corner of the table, there is an iron and a small potted plant. The background features a white wall and a beige door, with some stickers and decorations on it. The bottom right corner of the scene includes some red fruits.\nThen, a hand enters from the left side of the scene, holding a red ball. The nails on this hand are painted red. Afterward, the owner of the hand enters the scene; she is a woman wearing a black tank top and light-colored pants, with only her upper body in the scene. In her other hand, she holds a book with a cover featuring a dog.\nThe woman stabilizes the red ball on the table with her right hand, then removes her right hand from the table. She then positions the book in her left hand in front of the ball, completely covering it, with the book’s cover featuring a dog facing the camera. The cover reads \"Dream Dogs.\" The woman moves the book left and right with both hands. Finally, she lifts the book, revealing the red ball behind it, then places the book back on the table. 
The woman exits the scene from the left side.", "events": [ { "event": "The video begins with the camera focused on a white table. In the top right corner of the table, there is an iron and a small potted plant. The background features a white wall and a beige door, with some stickers and decorations on it. The bottom right corner of the scene includes some red fruits.", "visual_elements": [ { "content": "The camera focuses on a white table.", "type": "camera", "weight": 3 }, { "content": "In the top right corner of the table, there is an iron and a small potted plant.", "type": "attribute", "weight": 2 }, { "content": "The background features a white wall and a beige door.", "type": "scene", "weight": 2 }, { "content": "The door has some stickers and decorations.", "type": "scene", "weight": 2 }, { "content": "The bottom right corner of the scene includes some red fruits.", "type": "scene", "weight": 1 } ] }, { "event": "Then, a hand enters from the left side of the scene, holding a red ball. The nails on this hand are painted red. Afterward, the owner of the hand enters the scene; she is a woman wearing a black tank top and light-colored pants, with only her upper body in the scene. In her other hand, she holds a book with a cover featuring a dog.", "visual_elements": [ { "content": "A hand enters from the left side of the scene, holding a red ball.", "type": "action", "weight": 3 }, { "content": "The nails on this hand are painted red.", "type": "attribute", "weight": 2 }, { "content": "The owner of the hand enters the scene.", "type": "action", "weight": 3 }, { "content": "She is a woman wearing a black tank top and light-colored pants.", "type": "attribute", "weight": 3 }, { "content": "Only the woman's upper body enters the scene.", "type": "attribute", "weight": 2 }, { "content": "The woman’s other hand is holding a book with a cover featuring a dog.", "type": "attribute", "weight": 2 } ] }, { "event": "The woman stabilizes the red ball on the table with her right hand, then removes her right hand from the table. She then positions the book in her left hand in front of the ball, completely covering it, with the book’s cover featuring a dog facing the camera. The cover reads \"Dream Dogs.\" The woman moves the book left and right with both hands. Finally, she lifts the book, revealing the red ball behind it, then places the book back on the table. 
The woman exits the scene from the left side.", "visual_elements": [ { "content": "The woman stabilizes the red ball on the table with her right hand, then removes her right hand.", "type": "action", "weight": 3 }, { "content": "The woman then places the book from her left hand in front of the ball, completely covering it.", "type": "action", "weight": 3 }, { "content": "The book has a dog-themed cover facing the camera.", "type": "attribute", "weight": 3 }, { "content": "The cover reads \"Dream Dogs.\"", "type": "attribute", "weight": 2 }, { "content": "The woman moves the book left and right with both hands.", "type": "action", "weight": 3 }, { "content": "The woman lifts the book, revealing the red ball behind it.", "type": "action", "weight": 3 }, { "content": "The woman places the book back on the table.", "type": "action", "weight": 3 }, { "content": "The woman exits the scene from the left side.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0446", "video_path": "PerceptionTest/video_11461.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 9.55, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focused on a wooden table, with a blue and white striped ball on the table. On the left side of the scene, there is an adult man's arm with his hand placed on the ball. On the right side of the scene, in front of the ball on the table, there is a thick dark brown book. The background is indoors with white ceramic tile flooring. On the left side, there is a white double door with a mirror; in the mirror, the table, a chair, the man standing beside the table, and a yellow balloon under the table can be seen. On the right side, there is a glass door with a pattern, and there are blue wires on both sides of the door.\nThe man pushes the ball forward. The ball rolls for a short distance before being stopped by the thick book in front, causing the ball to change direction and roll off the right side of the scene. The man walks a few steps forward and enters the scene from the left side. He is wearing a black short-sleeve shirt and dark pants. He extends his left hand to pick up the thick book from the table, and his right hand stretches out from the right side of the scene to grab the ball. The camera gently sways slightly with the man's movement.\nThe man places the ball back on the table and then forcefully pushes it forward. The ball rolls forward and eventually falls off the table. The camera follows the ball's movement and shifts right, revealing another part of the room. A TV is placed on a wooden TV cabinet, and there is a black leather shoe on the table with a white cloth underneath it.", "events": [ { "event": "The video begins with the camera focused on a wooden table, with a blue and white striped ball on the table. On the left side of the scene, there is an adult man's arm with his hand placed on the ball. On the right side of the scene, in front of the ball on the table, there is a thick dark brown book. The background is indoors with white ceramic tile flooring. On the left side, there is a white double door with a mirror; in the mirror, the table, a chair, the man standing beside the table, and a yellow balloon under the table can be seen. 
On the right side, there is a glass door with a pattern, and there are blue wires on both sides of the door.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "On the table, there is a blue and white striped ball.", "type": "attribute", "weight": 3 }, { "content": "On the left side of the scene, there is an adult man's arm.", "type": "attribute", "weight": 3 }, { "content": "The man’s hand is placed on the ball.", "type": "attribute", "weight": 3 }, { "content": "On the right side of the scene, in front of the ball on the table, there is a thick dark brown book.", "type": "attribute", "weight": 2 }, { "content": "The background is indoors, with white ceramic tile flooring.", "type": "scene", "weight": 2 }, { "content": "On the left side, there is a white double door with a mirror.", "type": "scene", "weight": 3 }, { "content": "In the mirror, the table, a chair, the man standing beside the table, and a yellow balloon under the table are visible.", "type": "scene", "weight": 2 }, { "content": "On the right side, there is a patterned glass door.", "type": "scene", "weight": 3 }, { "content": "There are some blue wires in front and behind the glass door.", "type": "scene", "weight": 1 } ] }, { "event": "The man pushes the ball forward. The ball rolls for a short distance before being stopped by the thick book in front, causing the ball to change direction and roll off the right side of the scene. The man walks a few steps forward and enters the scene from the left side. He is wearing a black short-sleeve shirt and dark pants. He extends his left hand to pick up the thick book from the table, and his right hand stretches out from the right side of the scene to grab the ball. The camera gently sways slightly with the man's movement.", "visual_elements": [ { "content": "The man pushes the ball forward.", "type": "action", "weight": 3 }, { "content": "The ball rolls forward a short distance before being stopped by the thick book in front.", "type": "action", "weight": 3 }, { "content": "The ball changes direction and rolls off the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The man walks a few steps forward and enters the scene from the left side.", "type": "action", "weight": 3 }, { "content": "The man is wearing a black short-sleeve shirt and dark pants.", "type": "attribute", "weight": 2 }, { "content": "The man extends his left hand and picks up the thick book from the table.", "type": "action", "weight": 3 }, { "content": "The man extends his right hand from the right side of the scene to grab the ball.", "type": "action", "weight": 3 }, { "content": "The camera slightly sways left and right, following the man’s movement.", "type": "camera", "weight": 3 } ] }, { "event": "The man places the ball back on the table and then forcefully pushes it forward. The ball rolls forward and eventually falls off the table. The camera follows the ball's movement and shifts right, revealing another part of the room. 
A TV is placed on a wooden TV cabinet, and there is a black leather shoe on the table with a white cloth underneath it.", "visual_elements": [ { "content": "The man places the ball back on the table and then forcefully pushes it forward.", "type": "action", "weight": 3 }, { "content": "The ball rolls forward and then falls off the table.", "type": "action", "weight": 3 }, { "content": "The camera follows the ball’s movement and shifts to the right.", "type": "camera", "weight": 3 }, { "content": "Another part of the room appears in the scene.", "type": "scene", "weight": 1 }, { "content": "A television is placed on a wooden TV cabinet.", "type": "scene", "weight": 2 }, { "content": "There is also a black leather shoe on the table.", "type": "scene", "weight": 2 }, { "content": "There is a white cloth under the leather shoe.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 25 }, { "index": "TUNA_0447", "video_path": "PerceptionTest/video_11500.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 34.97, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focused on a kitchen countertop. In the scene, there is a woman wearing a gray robe with white heart patterns, and her upper body is visible. In front of her, there is a box of milk, with a piece of yellow cheese on the left and a white bowl on the right. To the right of the milk, there is a can of salt. Next to the salt, there is a bottle of cooking oil. On the table, which is closer to the camera, there is a white plate on the left, with some yellow granules and green vegetable leaves. To the right of the plate is a yellow-packaged bag.\nThe woman first extends both hands in front of the screen and claps a few times. She then reaches with her right hand toward the plate, picking up some of the granules and placing them into the bowl in front of her. The woman’s left hand picks up the yellow-packaged bag and pours some into the bowl. Next, the woman’s right hand continues to reach for the plate, picking up the green vegetable leaves, and using both hands to tear the leaves before tossing them into the bowl. The action of picking and tearing the leaves is repeated twice.\nThe woman picks up the cooking oil, opens the lid, and pours some oil into the bowl. She then sets the oil bottle down, picks up a spoon from the plate, and stirs the contents of the bowl a few times. Next, the woman picks up the salt container and sprinkles some salt into the bowl. The camera stops as the woman is about to place the salt container on the left side of the table next to the milk.", "events": [ { "event": "The video begins with the camera focused on a kitchen countertop. In the scene, there is a woman wearing a gray robe with white heart patterns, and her upper body is visible. In front of her, there is a box of milk, with a piece of yellow cheese on the left and a white bowl on the right. To the right of the milk, there is a can of salt. Next to the salt, there is a bottle of cooking oil. On the table, which is closer to the camera, there is a white plate on the left, with some yellow granules and green vegetable leaves. 
To the right of the plate is a yellow-packaged bag.", "visual_elements": [ { "content": "The camera focuses on a kitchen countertop.", "type": "camera", "weight": 3 }, { "content": "In the scene, a woman wearing a gray robe with white heart patterns is visible.", "type": "attribute", "weight": 3 }, { "content": "Only the upper half of the woman appears in the scene.", "type": "attribute", "weight": 2 }, { "content": "In front of the woman, there is a box of milk lying horizontally.", "type": "attribute", "weight": 3 }, { "content": "To the left of the milk box, there is a piece of yellow cheese, and to the right, a white bowl.", "type": "attribute", "weight": 3 }, { "content": "To the right of the milk, there is a can of salt.", "type": "attribute", "weight": 3 }, { "content": "To the right of the salt, there is a bottle of cooking oil.", "type": "attribute", "weight": 3 }, { "content": "On the table, closer to the camera, there is a white plate on the left.", "type": "attribute", "weight": 3 }, { "content": "The plate contains some yellow granules and green vegetable leaves.", "type": "attribute", "weight": 3 }, { "content": "To the right of the plate, there is a bag with yellow packaging.", "type": "attribute", "weight": 3 } ] }, { "event": "The woman first extends both hands in front of the screen and claps a few times. She then reaches with her right hand toward the plate, picking up some of the granules and placing them into the bowl in front of her. The woman’s left hand picks up the yellow-packaged bag and pours some into the bowl. Next, the woman’s right hand continues to reach for the plate, picking up the green vegetable leaves, and using both hands to tear the leaves before tossing them into the bowl. The action of picking and tearing the leaves is repeated twice.", "visual_elements": [ { "content": "The woman first extends both hands in front of the screen and claps a few times.", "type": "action", "weight": 3 }, { "content": "The woman extends her right hand toward the plate.", "type": "action", "weight": 2 }, { "content": "She picks up some granules from the plate one by one and places them into the bowl in front of her.", "type": "action", "weight": 3 }, { "content": "The woman’s left hand picks up the yellow-packaged bag and pours some into the bowl.", "type": "action", "weight": 3 }, { "content": "The woman’s right hand continues to reach for the plate, picking up the green vegetable leaves.", "type": "action", "weight": 3 }, { "content": "She tears the vegetable leaves with both hands before tossing them into the bowl.", "type": "action", "weight": 3 }, { "content": "The actions of picking and tearing the leaves are repeated twice.", "type": "attribute", "weight": 3 } ] }, { "event": "The woman picks up the cooking oil, opens the lid, and pours some oil into the bowl. She then sets the oil bottle down, picks up a spoon from the plate, and stirs the contents of the bowl a few times. Next, the woman picks up the salt container and sprinkles some salt into the bowl. 
The camera stops as the woman is about to place the salt container on the left side of the table next to the milk.", "visual_elements": [ { "content": "The woman picks up the cooking oil.", "type": "action", "weight": 2 }, { "content": "She opens the lid of the bottle and pours some oil into the bowl.", "type": "action", "weight": 3 }, { "content": "She sets the oil bottle down.", "type": "action", "weight": 2 }, { "content": "The woman picks up a spoon from the plate and stirs the contents of the bowl a few times.", "type": "action", "weight": 3 }, { "content": "The woman picks up the salt container and sprinkles some salt into the bowl.", "type": "action", "weight": 3 }, { "content": "The camera stops as the woman is about to place the salt container on the left side of the table next to the milk.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0448", "video_path": "PerceptionTest/video_11501.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 26.49, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a top-down shot of a patterned red carpet on the floor. In the scene, a person’s legs and feet, dressed in shorts, are visible. In front of the person, on the carpet, there is a row of objects. On the far left is a green plastic box. To the right of the plastic box is a blue circular biscuit tin. To the right of the biscuit tin is a transparent glass cup. To the right of the glass cup is a cup designed with a tiger motif.\nThe person crosses their legs and sits on the carpet. They pick up the blue biscuit tin in front of them and open the lid. They then place the lid to the side on their right leg. The person swaps the position of the open biscuit tin with the green plastic box.\nThe person then picks up the tiger-patterned cup with their right hand and the glass cup with their left hand. They pour the liquid from the tiger cup into the glass cup until it is full. Afterward, they place the tiger cup back in its original position, and with the glass cup in hand, they lift it out of the scene. When the glass cup returns from above, the liquid inside has decreased by half. The person then places the glass cup back in its original position. The person turns their entire body to the left side of the scene to adjust their sitting posture. The video ends.", "events": [ { "event": "The video begins with a top-down shot of a patterned red carpet on the floor. In the scene, a person’s legs and feet, dressed in shorts, are visible. In front of the person, on the carpet, there is a row of objects. On the far left is a green plastic box. To the right of the plastic box is a blue circular biscuit tin. To the right of the biscuit tin is a transparent glass cup. 
To the right of the glass cup is a cup designed with a tiger motif.", "visual_elements": [ { "content": "The camera is shot from above, focusing on a patterned red carpet on the floor.", "type": "camera", "weight": 3 }, { "content": "In the scene, the legs and feet of a person wearing shorts appear.", "type": "attribute", "weight": 2 }, { "content": "In front of the person, there is a row of items on the carpet.", "type": "attribute", "weight": 2 }, { "content": "On the far left is a green plastic box.", "type": "attribute", "weight": 3 }, { "content": "To the right of the plastic box is a blue circular biscuit tin.", "type": "attribute", "weight": 3 }, { "content": "To the right of the biscuit tin is a transparent glass cup.", "type": "attribute", "weight": 3 }, { "content": "To the right of the glass cup is a cup designed with a tiger motif.", "type": "attribute", "weight": 3 } ] }, { "event": "The person crosses their legs and sits on the carpet. They pick up the blue biscuit tin in front of them and open the lid. They then place the lid to the side on their right leg. The person swaps the position of the open biscuit tin with the green plastic box.", "visual_elements": [ { "content": "The person crosses their legs and sits on the carpet.", "type": "action", "weight": 2 }, { "content": "The person picks up the blue biscuit tin in front of them and opens the lid.", "type": "action", "weight": 3 }, { "content": "The person places the lid of the biscuit tin on their right leg.", "type": "action", "weight": 2 }, { "content": "The person swaps the positions of the open biscuit tin and the green plastic box.", "type": "action", "weight": 3 } ] }, { "event": "The person then picks up the tiger-patterned cup with their right hand and the glass cup with their left hand. They pour the liquid from the tiger cup into the glass cup until it is full. Afterward, they place the tiger cup back in its original position, and with the glass cup in hand, they lift it out of the scene. When the glass cup returns from above, the liquid inside has decreased by half. The person then places the glass cup back in its original position. The person turns their entire body to the left side of the scene to adjust their sitting posture. The video ends.", "visual_elements": [ { "content": "The person picks up the tiger-patterned cup with their right hand.", "type": "action", "weight": 3 }, { "content": "The person picks up the glass cup with their left hand.", "type": "action", "weight": 3 }, { "content": "The person pours the liquid from the tiger cup into the glass cup until it is full.", "type": "action", "weight": 3 }, { "content": "The person places the tiger cup back in its original position.", "type": "action", "weight": 2 }, { "content": "The person lifts the glass cup out of the scene with their right hand.", "type": "action", "weight": 3 }, { "content": "When the glass cup returns from above, the liquid inside has decreased by half.", "type": "attribute", "weight": 3 }, { "content": "The person places the glass cup back in its original position.", "type": "action", "weight": 2 }, { "content": "The person turns their whole body to the left side of the scene, adjusting their sitting posture. 
The video ends.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0449", "video_path": "PerceptionTest/video_11577.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 35.05, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is shot from above a white table, which has a large blue plate, a pair of knives and forks, a white plate with 4 slices of ham, 3 slices of tomato, and 3 slices of yellow cheese, two eggs, a white plate with two slices of dry bread, a striped cup, and a pink plastic cup. A woman’s hand appears from the left side of the scene, with dark-colored nail polish on her nails.\nNext, the woman’s right hand picks up a slice of dry bread and places it on the large blue plate. At the same time, her left hand, which has a tattoo, rests on one side of the blue plate and then hovers. She then picks up another slice of dry bread with her right hand and places it on the other side of the blue plate, while her right hand hovers on the side of the plate.\nThe woman’s right hand reaches for the small white plate, picks up a slice of ham, and places it on the dry bread on the left side of the scene. The woman’s right hand picks up another slice of ham and places it on top of the first slice of ham. Then, the woman’s right hand places two slices of tomato on top of the ham. Meanwhile, her left hand disappears from the scene.\nThe woman’s right hand then picks up 3 slices of cheese from the small white plate, placing them on top of the tomato slices. Next, the woman uses both hands to flip the slice of dry bread on the right side of the scene and place it on top of the cheese slices, then moves the entire food item upwards in the scene.\nThen, the woman’s left hand picks up the pink plastic cup and pours the white liquid inside into the striped cup. After the liquid is poured, the woman shakes the pink plastic cup, then places it back on the table. Finally, both of the woman’s hands disappear completely from the scene.", "events": [ { "event": "At the beginning of the video, the camera is shot from above a white table, which has a large blue plate, a pair of knives and forks, a white plate with 4 slices of ham, 3 slices of tomato, and 3 slices of yellow cheese, two eggs, a white plate with two slices of dry bread, a striped cup, and a pink plastic cup. A woman’s hand appears from the left side of the scene, with dark-colored nail polish on her nails.", "visual_elements": [ { "content": "The camera is positioned overhead, focusing on a white table.", "type": "camera", "weight": 3 }, { "content": "On the table, there is a blue large plate and a pair of knives and forks.", "type": "attribute", "weight": 3 }, { "content": "On the table, there is a white plate containing 4 slices of ham, 3 slices of tomato, and 3 slices of yellow cheese.", "type": "attribute", "weight": 3 }, { "content": "There are two eggs on the table and a white plate with two slices of dry bread.", "type": "attribute", "weight": 3 }, { "content": "On the table, there is a striped cup and a pink plastic cup.", "type": "attribute", "weight": 3 }, { "content": "A woman's hand appears on the left side of the scene, with dark nail polish on her nails.", "type": "attribute", "weight": 3 } ] }, { "event": "Next, the woman’s right hand picks up a slice of dry bread and places it on the large blue plate. 
At the same time, her left hand, which has a tattoo, rests on one side of the blue plate and then hovers. She then picks up another slice of dry bread with her right hand and places it on the other side of the blue plate, while her right hand hovers on the side of the plate.", "visual_elements": [ { "content": "The woman picks up a slice of dry bread with her right hand and places it on the blue plate.", "type": "action", "weight": 3 }, { "content": "The woman’s tattooed left hand rests on one side of the blue plate and then hovers above it.", "type": "action", "weight": 2 }, { "content": "The woman picks up another slice of dry bread with her right hand and places it on the other side of the blue plate.", "type": "action", "weight": 3 }, { "content": "The woman’s right hand rests on one side of the blue plate and then hovers above it.", "type": "action", "weight": 2 } ] }, { "event": "The woman’s right hand reaches for the small white plate, picks up a slice of ham, and places it on the dry bread on the left side of the scene. The woman’s right hand picks up another slice of ham and places it on top of the first slice of ham. Then, the woman’s right hand places two slices of tomato on top of the ham. Meanwhile, her left hand disappears from the scene.", "visual_elements": [ { "content": "The woman extends her right hand toward the white small plate, picks up a slice of ham, and places it on the dry bread on the left side of the scene.", "type": "action", "weight": 3 }, { "content": "The woman picks up another slice of ham with her right hand and places it on top of the first slice.", "type": "action", "weight": 3 }, { "content": "The woman places two slices of tomato on the ham slices, one after the other.", "type": "action", "weight": 3 }, { "content": "The woman’s left hand disappears from the scene.", "type": "attribute", "weight": 2 } ] }, { "event": "The woman’s right hand then picks up 3 slices of cheese from the small white plate, placing them on top of the tomato slices. Next, the woman uses both hands to flip the slice of dry bread on the right side of the scene and place it on top of the cheese slices, then moves the entire food item upwards in the scene.", "visual_elements": [ { "content": "The woman’s right hand picks up three slices of cheese from the white plate and places them on the tomato slices one by one.", "type": "action", "weight": 3 }, { "content": "The woman uses both hands to flip the slice of dry bread on the right side of the scene and cover the cheese slices with it.", "type": "action", "weight": 3 }, { "content": "The woman moves the entire food upward in the scene with both hands.", "type": "action", "weight": 3 } ] }, { "event": "Then, the woman’s left hand picks up the pink plastic cup and pours the white liquid inside into the striped cup. After the liquid is poured, the woman shakes the pink plastic cup, then places it back on the table. 
Finally, both of the woman’s hands disappear completely from the scene.", "visual_elements": [ { "content": "The woman picks up the pink plastic cup with her left hand and pours the white liquid into the striped cup.", "type": "action", "weight": 3 }, { "content": "The woman shakes the pink plastic cup and then places it back on the table.", "type": "action", "weight": 3 }, { "content": "The woman’s hands completely disappear from the scene.", "type": "attribute", "weight": 3 } ] } ], "n_events": 5, "n_elements": 20 }, { "index": "TUNA_0450", "video_path": "PerceptionTest/video_1199.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 20.17, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on the area below a man’s face. He is wearing a white short-sleeve T-shirt and dark shorts, standing next to a red round table. From left to right across the table, there is a white lamp with a striped cap, a power strip, and a transparent cup. The background is a white wall.\nThe man’s left hand picks up the transparent plastic cup, and his right hand grips the bottle. The man’s left hand twists the blue cap open, then places the cap on the table. He then takes out a cylindrical item with a silver cap from the cup and places it on the table, followed by another cylindrical item with a red cap, and then another with a black cap. The three cylindrical items are arranged in a straight line.\nThe man’s right hand places the plastic cup back on the table, then both hands press down on a white plug on the power strip, and the right hand pulls the plug out. The white lamp turns off. The man then places the plug next to the power strip. The man’s right hand holds the glass cup upside down, while his left hand takes out a small bottle that slides out of the glass cup and places it on the table closest to the power strip.\nFinally, the man’s right hand lifts the black wire upwards, and his left hand presses the plug, then moves his left hand backward.", "events": [ { "event": "At the beginning of the video, the camera is focused on the area below a man’s face. He is wearing a white short-sleeve T-shirt and dark shorts, standing next to a red round table. From left to right across the table, there is a white lamp with a striped cap, a power strip, and a transparent cup. The background is a white wall.", "visual_elements": [ { "content": "The camera is facing the lower part of a man's face.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a white short-sleeve t-shirt and dark shorts, standing next to a red round table.", "type": "attribute", "weight": 3 }, { "content": "On the table, there is a white lamp with a striped cap, a power strip, and a transparent cup.", "type": "attribute", "weight": 3 }, { "content": "The background is a white wall.", "type": "scene", "weight": 2 } ] }, { "event": "The man’s left hand picks up the transparent plastic cup, and his right hand grips the bottle. The man’s left hand twists the blue cap open, then places the cap on the table. He then takes out a cylindrical item with a silver cap from the cup and places it on the table, followed by another cylindrical item with a red cap, and then another with a black cap. 
The three cylindrical items are arranged in a straight line.", "visual_elements": [ { "content": "The man picks up the transparent plastic cup with his left hand, holding the bottle with his right hand.", "type": "action", "weight": 3 }, { "content": "The man unscrews the blue bottle cap with his left hand and places the cap on the table.", "type": "action", "weight": 3 }, { "content": "The man takes out a cylindrical item with a silver cap from the glass cup and places it on the table.", "type": "action", "weight": 3 }, { "content": "The man takes out a cylindrical item with a red cap from the glass cup and places it on the table.", "type": "action", "weight": 3 }, { "content": "The man takes out a cylindrical item with a black cap from the glass cup and places it on the table.", "type": "action", "weight": 3 }, { "content": "The three cylindrical items are arranged in a straight line.", "type": "attribute", "weight": 2 } ] }, { "event": "The man’s right hand places the plastic cup back on the table, then both hands press down on a white plug on the power strip, and the right hand pulls the plug out. The white lamp turns off. The man then places the plug next to the power strip. The man’s right hand holds the glass cup upside down, while his left hand takes out a small bottle that slides out of the glass cup and places it on the table closest to the power strip.", "visual_elements": [ { "content": "The man places the plastic cup back on the table with his right hand and then presses a white plug on the power strip with both hands.", "type": "action", "weight": 3 }, { "content": "The man pulls out the plug with his right hand.", "type": "action", "weight": 3 }, { "content": "The white lamp goes out.", "type": "action", "weight": 3 }, { "content": "The man places the plug next to the power strip with his right hand.", "type": "action", "weight": 3 }, { "content": "The man holds the glass cup upside down with his right hand.", "type": "attribute", "weight": 3 }, { "content": "The man’s left hand takes out a small bottle that slides downward from the glass cup.", "type": "action", "weight": 3 }, { "content": "The man places the small bottle on the table, closest to the power strip, with his left hand.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the man’s right hand lifts the black wire upwards, and his left hand presses the plug, then moves his left hand backward.", "visual_elements": [ { "content": "The man lifts the black cable upward with his right hand.", "type": "action", "weight": 3 }, { "content": "The man presses the plug with his left hand.", "type": "action", "weight": 3 }, { "content": "The man moves his left hand backward.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_0451", "video_path": "PerceptionTest/video_1251.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 11.83, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a person dressed in a dark blue coat, from just below the face to above the waist. The person is sitting next to a table covered with a floral-patterned tablecloth. On the left side of the scene, there is a silver teapot on the table, with a yellow piece of clothing underneath it. In front of the teapot, a blue pen is placed on top of the clothing. 
On the right side of the scene, there is a pink-covered book on the table.\nThe person’s right hand is clenched and placed on the table, with a yellow circular object next to it. The person opens their right hand, grabs the yellow circular object, and picks it up. Then, the person places the yellow circular object back on the table and allows it to roll freely towards the book. After the yellow circular object touches the book, it rolls back towards the camera, moving to the right side of the scene. The person moves the yellow circular object to the center of the scene, then presses down on it.\nThe person’s left hand takes the book below the table, and the left hand is also placed beneath the table. Then, the person’s right hand rolls the yellow circular object towards their body. The yellow circular object falls off the table. Finally, the person extends their right hand towards the camera, causing the teapot to be knocked and wobble during the process.", "events": [ { "event": "At the beginning of the video, the camera is focused on a person dressed in a dark blue coat, from just below the face to above the waist. The person is sitting next to a table covered with a floral-patterned tablecloth. On the left side of the scene, there is a silver teapot on the table, with a yellow piece of clothing underneath it. In front of the teapot, a blue pen is placed on top of the clothing. On the right side of the scene, there is a pink-covered book on the table.", "visual_elements": [ { "content": "The camera is focused on the area from the lower part of the face to the upper part of the waist of a person wearing a dark blue jacket.", "type": "camera", "weight": 3 }, { "content": "The person is sitting next to a table covered with a floral-patterned tablecloth.", "type": "attribute", "weight": 3 }, { "content": "On the table, there is a silver teapot.", "type": "attribute", "weight": 3 }, { "content": "Below the teapot, there is a yellow piece of clothing.", "type": "scene", "weight": 2 }, { "content": "In front of the teapot, a blue pen is placed on the piece of clothing.", "type": "scene", "weight": 2 }, { "content": "On the table, there is a book with a pink cover.", "type": "attribute", "weight": 3 } ] }, { "event": "The person’s right hand is clenched and placed on the table, with a yellow circular object next to it. The person opens their right hand, grabs the yellow circular object, and picks it up. Then, the person places the yellow circular object back on the table and allows it to roll freely towards the book. After the yellow circular object touches the book, it rolls back towards the camera, moving to the right side of the scene. 
The person moves the yellow circular object to the center of the scene, then presses down on it.", "visual_elements": [ { "content": "The person places their right fist on the table.", "type": "attribute", "weight": 3 }, { "content": "Next to the person’s right hand, there is a yellow round object.", "type": "attribute", "weight": 3 }, { "content": "The person opens their right hand, grabs the yellow round object, and picks it up.", "type": "action", "weight": 3 }, { "content": "The person places the yellow round object back on the table with their right hand.", "type": "action", "weight": 3 }, { "content": "The person rolls the yellow round object freely on the table toward the book.", "type": "action", "weight": 3 }, { "content": "The yellow round object touches the book and rolls back towards the camera, moving to the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The person moves the yellow round object to the center of the scene with their right hand.", "type": "action", "weight": 3 }, { "content": "The person presses down on the yellow round object with their right hand.", "type": "attribute", "weight": 3 } ] }, { "event": "The person’s left hand takes the book below the table, and the left hand is also placed beneath the table. Then, the person’s right hand rolls the yellow circular object towards their body. The yellow circular object falls off the table. Finally, the person extends their right hand towards the camera, causing the teapot to be knocked and wobble during the process.", "visual_elements": [ { "content": "The person’s left hand moves the book below the table.", "type": "action", "weight": 3 }, { "content": "The person’s left hand remains below the table.", "type": "action", "weight": 3 }, { "content": "The person rolls the yellow round object toward their body with their right hand.", "type": "action", "weight": 3 }, { "content": "The yellow round object falls below the table.", "type": "action", "weight": 3 }, { "content": "The person extends their right hand toward the camera, accidentally bumping the teapot.", "type": "action", "weight": 3 }, { "content": "The teapot shakes.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0452", "video_path": "PerceptionTest/video_1291.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 34.96, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on an open door. In front of the door is a light-colored table, with a black boot leaning against the wall on the table. A woman wearing a yellow short-sleeve shirt and light-colored jeans enters from the left side of the scene and walks to the table. She is facing the camera with the right side of her body, holding an orange cloth.\nNext, the woman places the orange cloth at the door with her right hand, and closes the door with her left hand. The cloth is wedged between the door and the doorframe. The woman pulls the cloth with her right hand, then opens the door with her left hand. She turns to her right and exits the scene.\nThe woman enters the scene, holding a piece of white paper in her right hand. She places the paper at the doorframe with her right hand, while her left hand closes the door, trapping the paper in the door. The woman lowers both hands and swings them backward, then pulls the paper out of the door gap with her right hand. 
The woman slightly opens the door with her left hand, then turns towards the camera and exits the scene. Meanwhile, the door wobbles slightly.\nLater, the woman enters the scene holding a black chair with her right hand, and opens the door with her left hand. She places the chair next to the table, adjusts its position, and then pushes the door forward with her left hand. She first places her right hand on the door, then lifts the chair and tilts it forward.", "events": [ { "event": "At the beginning of the video, the camera is focused on an open door. In front of the door is a light-colored table, with a black boot leaning against the wall on the table. A woman wearing a yellow short-sleeve shirt and light-colored jeans enters from the left side of the scene and walks to the table. She is facing the camera with the right side of her body, holding an orange cloth.", "visual_elements": [ { "content": "The camera is focused on an open door.", "type": "camera", "weight": 3 }, { "content": "In front of the door, there is a light-colored table with a black boot resting against the wall on the table.", "type": "scene", "weight": 2 }, { "content": "In the scene, there is a woman wearing a yellow short-sleeve top and light-colored jeans.", "type": "attribute", "weight": 3 }, { "content": "The woman enters from the left side of the scene and walks to the side of the table.", "type": "action", "weight": 3 }, { "content": "The woman is facing the camera with her right side, holding an orange cloth in her hand.", "type": "attribute", "weight": 3 } ] }, { "event": "Next, the woman places the orange cloth at the door with her right hand, and closes the door with her left hand. The cloth is wedged between the door and the doorframe. The woman pulls the cloth with her right hand, then opens the door with her left hand. She turns to her right and exits the scene.", "visual_elements": [ { "content": "The woman places the orange cloth at the doorframe with her right hand and closes the door with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman is holding the cloth with her right hand.", "type": "attribute", "weight": 3 }, { "content": "The cloth is caught in the doorframe.", "type": "attribute", "weight": 3 }, { "content": "The woman pulls the cloth with her right hand.", "type": "action", "weight": 2 }, { "content": "The woman opens the door with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman turns to her right and exits the scene.", "type": "action", "weight": 3 } ] }, { "event": "The woman enters the scene, holding a piece of white paper in her right hand. She places the paper at the doorframe with her right hand, while her left hand closes the door, trapping the paper in the door. The woman lowers both hands and swings them backward, then pulls the paper out of the door gap with her right hand. The woman slightly opens the door with her left hand, then turns towards the camera and exits the scene. 
Meanwhile, the door wobbles slightly.", "visual_elements": [ { "content": "The woman enters the scene.", "type": "action", "weight": 3 }, { "content": "The woman is holding a white piece of paper in her right hand.", "type": "attribute", "weight": 3 }, { "content": "The woman places the white paper at the doorframe with her right hand, while closing the door with her left hand.", "type": "action", "weight": 3 }, { "content": "The white paper is caught in the doorframe.", "type": "attribute", "weight": 3 }, { "content": "The woman lowers her hands and waves them backward, then uses her right hand to pull the white paper out of the door gap.", "type": "action", "weight": 3 }, { "content": "The woman slightly opens the door with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman turns toward the camera and exits the scene.", "type": "action", "weight": 3 }, { "content": "The door slightly shakes.", "type": "action", "weight": 2 } ] }, { "event": "Later, the woman enters the scene holding a black chair with her right hand, and opens the door with her left hand. She places the chair next to the table, adjusts its position, and then pushes the door forward with her left hand. She first places her right hand on the door, then lifts the chair and tilts it forward.", "visual_elements": [ { "content": "The woman enters the scene holding a black chair with her right hand.", "type": "action", "weight": 3 }, { "content": "The woman opens the door with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman places the chair next to the table with her right hand and adjusts its position.", "type": "action", "weight": 3 }, { "content": "The woman pushes the door forward with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman places her right hand on the door.", "type": "action", "weight": 3 }, { "content": "The woman lifts the chair with her right hand and tilts it forward.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0453", "video_path": "PerceptionTest/video_1331.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 26.59, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a black table, which has no items on it. In the background, a laptop is placed on the sofa, with some text displayed on the screen. There is a geometric-patterned cushion on the sofa.\nNext, a man dressed in dark clothes enters from the left side of the scene, walks up to the table, and faces the camera without revealing his face. He draws three and a half circles on the table with his left index finger, finally stopping at the center of the table directly in front of him.\nThe man lowers his left hand, retrieves a slice of bread from under the table, and places it in the top-right corner of the table. Then, he places a transparent small jar at the center of the table with his right hand. Next, he places a thick book on the right side of the table, directly in front of the slice of bread. Then, he places a white plug behind the jar. Finally, he places a black data cable behind the jar, in front of the plug.\nThe man first walks to the left side of the scene, then walks towards the front of the scene, and finally disappears from the scene.", "events": [ { "event": "At the beginning of the video, the camera focuses on a black table, which has no items on it. 
In the background, a laptop is placed on the sofa, with some text displayed on the screen. There is a geometric-patterned cushion on the sofa.", "visual_elements": [ { "content": "The camera focuses on a black table.", "type": "camera", "weight": 3 }, { "content": "There is nothing on the table.", "type": "attribute", "weight": 3 }, { "content": "A laptop is placed on the sofa.", "type": "scene", "weight": 2 }, { "content": "The laptop screen displays some text.", "type": "attribute", "weight": 2 }, { "content": "There is a geometric-patterned cushion on the sofa.", "type": "scene", "weight": 2 } ] }, { "event": "Next, a man dressed in dark clothes enters from the left side of the scene, walks up to the table, and faces the camera without revealing his face. He draws three and a half circles on the table with his left index finger, finally stopping at the center of the table directly in front of him.", "visual_elements": [ { "content": "A man dressed in dark clothes enters from the left side of the scene and walks to the table.", "type": "action", "weight": 3 }, { "content": "The man is facing the camera, but his face is not visible.", "type": "attribute", "weight": 3 }, { "content": "The man draws three and a half circles with his left index finger on the table, finally stopping at the point directly in front of his body.", "type": "action", "weight": 3 } ] }, { "event": "The man lowers his left hand, retrieves a slice of bread from under the table, and places it in the top-right corner of the table. Then, he places a transparent small jar at the center of the table with his right hand. Next, he places a thick book on the right side of the table, directly in front of the slice of bread. Then, he places a white plug behind the jar. Finally, he places a black data cable behind the jar, in front of the plug.", "visual_elements": [ { "content": "The man lowers his left hand, takes a piece of bread from underneath the table, and places it in the top-right corner of the table in the scene.", "type": "action", "weight": 3 }, { "content": "The man places a transparent small jar in the center of the table with his right hand.", "type": "action", "weight": 3 }, { "content": "The man places a thick book on the right side of the table in the scene.", "type": "action", "weight": 3 }, { "content": "The book is positioned directly in front of the piece of bread.", "type": "attribute", "weight": 3 }, { "content": "The man places a white plug behind the jar.", "type": "action", "weight": 3 }, { "content": "The man places a black data cable behind the jar and in front of the plug.", "type": "action", "weight": 3 } ] }, { "event": "The man first walks to the left side of the scene, then walks towards the front of the scene, and finally disappears from the scene.", "visual_elements": [ { "content": "The man walks to the left side of the scene.", "type": "action", "weight": 3 }, { "content": "The man walks toward the front of the scene.", "type": "action", "weight": 3 }, { "content": "The man disappears from the scene.", "type": "attribute", "weight": 3 } ] } ], "n_events": 4, "n_elements": 17 }, { "index": "TUNA_0454", "video_path": "PerceptionTest/video_1350.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 35.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a white table. On the left side of the scene, there is a metal stand with a black data cable on it. 
Near the right side of the table in the scene, there is an upside-down black shoe. The background features a white wall and a closed window, with a radiator beneath the window. A person dressed in a black vest and white long-sleeved shirt enters from the right side of the scene, walks up to the table, and faces the table, holding some small items in their left hand.\nThe person uses their right hand to pick up a pen with a green cap from under the table, lifts it into the air and shakes it, then places it diagonally on the table with the cap pointing to the right side of the scene. Next, the person picks up a pen with a blue cap, lifts it into the air, shakes it, and then places it vertically on the table. After that, the person picks up a pen with a black cap, lifts it into the air, shakes it, and then places it diagonally on the table with the cap facing the camera.\nThe person then picks up a silver spoon from under the table with their right hand, lifts it into the air, flips it once, and then places the spoon on the table. The spoon is positioned between the pen with the green cap and the pen with the blue cap, pointing to the right side of the scene. Finally, the person retracts their right hand and places it straight by their right side.", "events": [ { "event": "At the beginning of the video, the camera focuses on a white table. On the left side of the scene, there is a metal stand with a black data cable on it. Near the right side of the table in the scene, there is an upside-down black shoe. The background features a white wall and a closed window, with a radiator beneath the window. A person dressed in a black vest and white long-sleeved shirt enters from the right side of the scene, walks up to the table, and faces the table, holding some small items in their left hand.", "visual_elements": [ { "content": "The camera focuses on a white table.", "type": "camera", "weight": 3 }, { "content": "On the left side of the scene, there is a metal stand with a black data cable on it.", "type": "scene", "weight": 2 }, { "content": "Near the right side of the table, there is a black shoe placed upside down.", "type": "scene", "weight": 2 }, { "content": "The background features a white wall and a closed window.", "type": "scene", "weight": 2 }, { "content": "There is a radiator beneath the window.", "type": "scene", "weight": 2 }, { "content": "A person dressed in a black vest and white long-sleeve shirt appears.", "type": "attribute", "weight": 3 }, { "content": "The person enters from the right side of the scene and walks to the side of the table.", "type": "action", "weight": 3 }, { "content": "The person faces the table, holding some smaller items in their left hand.", "type": "action", "weight": 3 } ] }, { "event": "The person uses their right hand to pick up a pen with a green cap from under the table, lifts it into the air and shakes it, then places it diagonally on the table with the cap pointing to the right side of the scene. Next, the person picks up a pen with a blue cap, lifts it into the air, shakes it, and then places it vertically on the table. 
After that, the person picks up a pen with a black cap, lifts it into the air, shakes it, and then places it diagonally on the table with the cap facing the camera.", "visual_elements": [ { "content": "The person picks up a pen with a green cap from under the table with their right hand.", "type": "action", "weight": 3 }, { "content": "The person raises the pen with the green cap in the air, shakes it, then places the pen at an angle on the table.", "type": "action", "weight": 3 }, { "content": "The green-capped pen is placed with its cap pointing to the right side of the scene.", "type": "attribute", "weight": 3 }, { "content": "The person picks up a pen with a blue cap with their right hand.", "type": "action", "weight": 3 }, { "content": "The person raises the pen with the blue cap in the air, shakes it, then places the pen vertically on the table.", "type": "action", "weight": 3 }, { "content": "The person picks up a pen with a black cap with their right hand.", "type": "action", "weight": 3 }, { "content": "The person raises the pen with the black cap in the air, shakes it, then places the pen at an angle on the table.", "type": "action", "weight": 3 }, { "content": "The black-capped pen is placed with its cap pointing towards the camera.", "type": "attribute", "weight": 3 } ] }, { "event": "The person then picks up a silver spoon from under the table with their right hand, lifts it into the air, flips it once, and then places the spoon on the table. The spoon is positioned between the pen with the green cap and the pen with the blue cap, pointing to the right side of the scene. Finally, the person retracts their right hand and places it straight by their right side.", "visual_elements": [ { "content": "The person picks up a silver spoon from under the table with their right hand.", "type": "action", "weight": 3 }, { "content": "The person raises the silver spoon in the air, flips it, then places the spoon on the table.", "type": "action", "weight": 3 }, { "content": "The spoon is positioned between the green-capped pen and the blue-capped pen, pointing to the right side of the scene.", "type": "attribute", "weight": 3 }, { "content": "The person withdraws their right hand and places it straight at their right side.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0455", "video_path": "PerceptionTest/video_1455.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 18.56, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a glass table, shot from the side. On the table, there is a white egg, a bunch of bananas, and a black object. The background features a white wall with a window, through which green plants are visible.\nA hand appears on the right side of the scene, belonging to a person wearing gray clothes, facing the table. The person grabs the bananas and turns them two full circles. The person retracts their right hand and extends their left hand to grip the egg. The person is wearing a light-colored watch on their left wrist. The person rotates the egg three times counterclockwise with their left hand.\nThen, the person picks up a black electric razor from the table with their right hand, operating the razor's button with their thumb. At the same time, the person extends their left hand to lightly touch the egg, then retracts it and disappears from the scene. 
The person places the razor back on the table with their right hand, then turns toward the camera and walks forward, leaving the scene.", "events": [ { "event": "At the beginning of the video, the camera is focused on a glass table, shot from the side. On the table, there is a white egg, a bunch of bananas, and a black object. The background features a white wall with a window, through which green plants are visible.", "visual_elements": [ { "content": "The camera focuses on a glass table, shot from the side of the table.", "type": "camera", "weight": 3 }, { "content": "On the table, there is a white egg, a bunch of bananas, and a black object.", "type": "attribute", "weight": 3 }, { "content": "The background features a white wall, with a window on the wall and green plants outside.", "type": "scene", "weight": 2 } ] }, { "event": "A hand appears on the right side of the scene, belonging to a person wearing gray clothes, facing the table. The person grabs the bananas and turns them two full circles. The person retracts their right hand and extends their left hand to grip the egg. The person is wearing a light-colored watch on their left wrist. The person rotates the egg three times counterclockwise with their left hand.", "visual_elements": [ { "content": "A hand is positioned on the right side of the scene, belonging to a person wearing gray clothes.", "type": "attribute", "weight": 3 }, { "content": "The person faces the table.", "type": "attribute", "weight": 3 }, { "content": "The person's hand grabs the bananas and turns them two times.", "type": "action", "weight": 3 }, { "content": "The person withdraws their right hand and extends their left hand to hold the egg.", "type": "action", "weight": 3 }, { "content": "The person is wearing a light-colored watch on their left hand.", "type": "attribute", "weight": 2 }, { "content": "The person rotates the egg three times counterclockwise with their left hand.", "type": "action", "weight": 3 } ] }, { "event": "Then, the person picks up a black electric razor from the table with their right hand, operating the razor's button with their thumb. At the same time, the person extends their left hand to lightly touch the egg, then retracts it and disappears from the scene. 
The person places the razor back on the table with their right hand, then turns toward the camera and walks forward, leaving the scene.", "visual_elements": [ { "content": "The person picks up a black electric shaver from the table with their right hand.", "type": "action", "weight": 3 }, { "content": "The person operates the shaver's button with their right thumb.", "type": "action", "weight": 3 }, { "content": "The person extends their left hand to briefly touch the egg, then withdraws it.", "type": "action", "weight": 3 }, { "content": "The person’s left hand disappears from the scene.", "type": "attribute", "weight": 2 }, { "content": "The person places the shaver back on the table with their right hand.", "type": "action", "weight": 3 }, { "content": "The person turns toward the camera.", "type": "action", "weight": 3 }, { "content": "The person walks forward and exits the scene.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0456", "video_path": "PerceptionTest/video_1489.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 21.39, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a table covered with a blue and white checkered tablecloth. On the table, there is a glass jar containing white powder, a box of Dr. Oetker products, a transparent glass cup with a silver spoon inside, a black kettle, and two overripe bananas. The background features a light-colored wall.\nThen, a woman wearing a black top enters from the right side of the scene. She faces the table, extending both hands toward the item box. She presses one end of the box with her left hand and uses her right hand to take out a tea bag from the box. Holding the tea bag in her right hand and the white string in her left, she places the tea bag into the glass cup. Next, the woman grasps the kettle handle with her left hand, lifts it, and tilts the spout over the glass cup. She uses her right index finger to press the kettle lid. The woman then places the kettle back on the table and stirs the contents of the glass cup with the spoon in her right hand.\nFinally, the woman retracts her right hand, turns to the right side of the scene, and walks forward, disappearing from the scene.", "events": [ { "event": "At the beginning of the video, the camera is focused on a table covered with a blue and white checkered tablecloth. On the table, there is a glass jar containing white powder, a box of Dr. Oetker products, a transparent glass cup with a silver spoon inside, a black kettle, and two overripe bananas. The background features a light-colored wall.", "visual_elements": [ { "content": "The camera focuses on a table covered with a blue-and-white checkered tablecloth.", "type": "camera", "weight": 3 }, { "content": "On the table, there is a glass jar containing white powder.", "type": "scene", "weight": 2 }, { "content": "On the table, there is a box of Dr. Oetker brand products.", "type": "attribute", "weight": 3 }, { "content": "On the table, there is a transparent glass cup with a silver spoon inside and a black kettle.", "type": "attribute", "weight": 3 }, { "content": "On the table, there are two overripe bananas.", "type": "scene", "weight": 2 }, { "content": "The background features a light-colored wall.", "type": "scene", "weight": 2 } ] }, { "event": "Then, a woman wearing a black top enters from the right side of the scene. 
She faces the table, extending both hands toward the item box. She presses one end of the box with her left hand and uses her right hand to take out a tea bag from the box. Holding the tea bag in her right hand and the white string in her left, she places the tea bag into the glass cup. Next, the woman grasps the kettle handle with her left hand, lifts it, and tilts the spout over the glass cup. She uses her right index finger to press the kettle lid. The woman then places the kettle back on the table and stirs the contents of the glass cup with the spoon in her right hand.", "visual_elements": [ { "content": "A woman wearing a black top enters from the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The woman faces the table.", "type": "attribute", "weight": 3 }, { "content": "The woman reaches toward the item box on the table with both hands.", "type": "action", "weight": 3 }, { "content": "The woman presses one end of the box with her left hand and takes a tea bag from the box with her right hand.", "type": "action", "weight": 3 }, { "content": "The woman holds the tea bag with her right hand, grasps the white string with her left hand, and places the tea bag in the glass cup.", "type": "action", "weight": 3 }, { "content": "The woman holds the kettle's handle with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman lifts the kettle with her left hand and tilts the spout toward the glass cup.", "type": "action", "weight": 3 }, { "content": "The woman presses the kettle lid with her right index finger.", "type": "action", "weight": 3 }, { "content": "The woman places the kettle back on the table with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman stirs the contents of the glass cup with the handle of the spoon in her right hand.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the woman retracts her right hand, turns to the right side of the scene, and walks forward, disappearing from the scene.", "visual_elements": [ { "content": "The woman withdraws her right hand.", "type": "action", "weight": 3 }, { "content": "The woman turns to the right side of the scene and walks forward, leaving the scene.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0457", "video_path": "PerceptionTest/video_1510.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 19.57, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a wooden table with a key and a bottle of mineral water, with the water bottle positioned closer to the right side of the scene. Behind the table on the left side of the scene, there is a chair with a deep red backrest. In the background, there are two leather chairs, a closed door, and a white wall.\nThen, a person wearing an orange short-sleeved T-shirt enters from the right side of the scene and walks to the table, facing it. After standing still, the person’s right hand reaches into the left side of the scene to take out a medium-sized yellow-green orange, which is noticeably shaking. The person then places the orange on the table, positioned at the lower left of the scene, closer to the camera than the water bottle. Next, the person’s right hand takes out a small yellow-green orange from the left side of the scene and places it behind the medium-sized orange. 
Then, the person’s right hand picks up a green leaf from the left side of the scene and places it on the table, positioned at the far left of the scene.\nThe person retracts their right hand, takes a large step to the right side of the scene, and then walks forward, disappearing from the scene.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wooden table with a key and a bottle of mineral water, with the water bottle positioned closer to the right side of the scene. Behind the table on the left side of the scene, there is a chair with a deep red backrest. In the background, there are two leather chairs, a closed door, and a white wall.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "On the table, there is a key and a bottle of mineral water.", "type": "scene", "weight": 2 }, { "content": "The mineral water bottle is positioned closer to the right side of the scene.", "type": "attribute", "weight": 2 }, { "content": "There is a chair with a dark red backrest.", "type": "attribute", "weight": 2 }, { "content": "In the background, there are two leather chairs, a closed door, and a white wall.", "type": "scene", "weight": 2 } ] }, { "event": "Then, a person wearing an orange short-sleeved T-shirt enters from the right side of the scene and walks to the table, facing it. After standing still, the person’s right hand reaches into the left side of the scene to take out a medium-sized yellow-green orange, which is noticeably shaking. The person then places the orange on the table, positioned at the lower left of the scene, closer to the camera than the water bottle. Next, the person’s right hand takes out a small yellow-green orange from the left side of the scene and places it behind the medium-sized orange. 
Then, the person’s right hand picks up a green leaf from the left side of the scene and places it on the table, positioned at the far left of the scene.", "visual_elements": [ { "content": "A person wearing an orange short-sleeve T-shirt enters from the right side of the scene and walks to the table.", "type": "action", "weight": 3 }, { "content": "The person faces the table.", "type": "attribute", "weight": 3 }, { "content": "The person’s right hand takes a medium-sized, yellow-green orange from the left side of the scene, shaking it noticeably.", "type": "action", "weight": 3 }, { "content": "The person places the orange on the table with their right hand.", "type": "action", "weight": 3 }, { "content": "The orange is positioned at the bottom-left of the scene, closer to the camera than the mineral water bottle.", "type": "attribute", "weight": 3 }, { "content": "The person’s right hand takes a small yellow-green orange from the left side of the scene.", "type": "action", "weight": 3 }, { "content": "The person places the small orange behind the medium-sized orange.", "type": "action", "weight": 3 }, { "content": "The person’s right hand picks up a green leafy vegetable from the left side of the scene and places it on the table.", "type": "action", "weight": 3 }, { "content": "The leafy vegetable is positioned at the far left side of the scene.", "type": "attribute", "weight": 3 } ] }, { "event": "The person retracts their right hand, takes a large step to the right side of the scene, and then walks forward, disappearing from the scene.", "visual_elements": [ { "content": "The person withdraws their right hand.", "type": "action", "weight": 3 }, { "content": "The person takes a large step toward the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The person walks forward, leaving the scene.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0458", "video_path": "PerceptionTest/video_1513.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 19.28, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is facing a table covered with a patterned tablecloth. On the table, there is a book, a pen, a desk lamp, a box of items, and a small melon. The background is a white tiled wall, which reflects a person’s shadow. There is a power outlet on the wall with a black plug connecting the desk lamp. On the left side of the scene, a hand touches the top of the desk lamp, and the orange-yellow light turns on. The hand then presses the top of the lamp twice.\nThe person’s right hand picks up the pen on the table, places it vertically with the tip facing down, and then makes the pen hit the table, repeating this action four times before placing the pen back on the table. Then, the person’s left hand lightly touches the top of the desk lamp, causing the light to turn off. The person’s left hand grasps the plug cord and swings it. Afterward, the person’s right hand picks up the book, and the left hand flips the pages.\nThe person touches the top of the desk lamp again, and the light turns on. The person then presses the top of the lamp twice more, increasing the brightness of the lamp. Finally, the person retracts their left hand and completely disappears from the scene. The camera shakes noticeably.", "events": [ { "event": "At the beginning of the video, the camera is facing a table covered with a patterned tablecloth. 
On the table, there is a book, a pen, a desk lamp, a box of items, and a small melon. The background is a white tiled wall, which reflects a person’s shadow. There is a power outlet on the wall with a black plug connecting the desk lamp. On the left side of the scene, a hand touches the top of the desk lamp, and the orange-yellow light turns on. The hand then presses the top of the lamp twice.", "visual_elements": [ { "content": "The camera is facing directly toward a table covered with a patterned tablecloth.", "type": "camera", "weight": 3 }, { "content": "On the table, there is a book, a pen, and a desk lamp.", "type": "attribute", "weight": 3 }, { "content": "There is a box of items and a small melon on the table.", "type": "scene", "weight": 2 }, { "content": "The background is a white tiled wall, with the shadow of a person reflected on the wall.", "type": "scene", "weight": 2 }, { "content": "There is a power outlet on the wall, with a black plug connected to the desk lamp inserted into it.", "type": "scene", "weight": 2 }, { "content": "A hand touches the top of the desk lamp.", "type": "action", "weight": 3 }, { "content": "The orange-yellow light turns on.", "type": "attribute", "weight": 3 }, { "content": "The hand presses the top of the desk lamp twice more.", "type": "action", "weight": 3 } ] }, { "event": "The person’s right hand picks up the pen on the table, places it vertically with the tip facing down, and then makes the pen hit the table, repeating this action four times before placing the pen back on the table. Then, the person’s left hand lightly touches the top of the desk lamp, causing the light to turn off. The person’s left hand grasps the plug cord and swings it. Afterward, the person’s right hand picks up the book, and the left hand flips the pages.", "visual_elements": [ { "content": "The person’s right hand picks up the pen from the table, places it upright with the tip facing down.", "type": "action", "weight": 3 }, { "content": "The person makes the pen hit the table by tapping it down and repeats the action four times.", "type": "action", "weight": 3 }, { "content": "The person places the pen back on the table.", "type": "action", "weight": 3 }, { "content": "The person’s left hand gently touches the top of the desk lamp, causing the light to turn off.", "type": "action", "weight": 3 }, { "content": "The person’s left hand grips the power cord and swings it.", "type": "action", "weight": 3 }, { "content": "The person’s right hand picks up the book, and the left hand flips through the pages.", "type": "action", "weight": 3 } ] }, { "event": "The person touches the top of the desk lamp again, and the light turns on. The person then presses the top of the lamp twice more, increasing the brightness of the lamp. Finally, the person retracts their left hand and completely disappears from the scene. 
The camera shakes noticeably.", "visual_elements": [ { "content": "The person touches the top of the desk lamp again, causing the light to turn on.", "type": "action", "weight": 3 }, { "content": "The person touches the top of the desk lamp two more times.", "type": "action", "weight": 3 }, { "content": "The brightness of the desk lamp increases.", "type": "attribute", "weight": 3 }, { "content": "The person withdraws their left hand.", "type": "action", "weight": 3 }, { "content": "The person completely disappears from the scene.", "type": "attribute", "weight": 3 }, { "content": "The camera shakes noticeably.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0459", "video_path": "PerceptionTest/video_1540.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 21.66, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is positioned sideways toward a wooden table, on which there is a white piece of paper, a yellow pencil, and a pink water cup. The background features a pink wall and a black connecting cable.\nThen, a pair of hands appears at the bottom right of the scene. The person’s right hand picks up the pencil on the table, and their left hand presses down on the paper with fingers spread. The person’s right hand starts writing from left to right on the paper. The right hand first writes the uppercase letter \"B\", followed by the uppercase letter \"A\", and finally the uppercase letter \"R\", forming the word \"BAR\". Afterward, the person picks up the paper with both hands, displays it toward the camera, and then places it back on the table.\nThe person’s left hand takes a blue \"B\" from underneath the table and places it on the table, while the right hand takes a red \"A\" from beneath the table and places it on the table. Then, the person’s right hand takes a blue \"R\" and places it on the table. After placing the letters, the person’s hands leave the scene.", "events": [ { "event": "At the beginning of the video, the camera is positioned sideways toward a wooden table, on which there is a white piece of paper, a yellow pencil, and a pink water cup. The background features a pink wall and a black connecting cable.", "visual_elements": [ { "content": "The camera is positioned to the side, focusing on a wooden table.", "type": "camera", "weight": 3 }, { "content": "On the table, there is a white sheet of paper and a yellow pencil.", "type": "attribute", "weight": 3 }, { "content": "On the table, there is a pink water cup.", "type": "scene", "weight": 2 }, { "content": "The background features a pink wall and a black connecting cable.", "type": "scene", "weight": 2 } ] }, { "event": "Then, a pair of hands appears at the bottom right of the scene. The person’s right hand picks up the pencil on the table, and their left hand presses down on the paper with fingers spread. The person’s right hand starts writing from left to right on the paper. The right hand first writes the uppercase letter \"B\", followed by the uppercase letter \"A\", and finally the uppercase letter \"R\", forming the word \"BAR\". 
Afterward, the person picks up the paper with both hands, displays it toward the camera, and then places it back on the table.", "visual_elements": [ { "content": "A pair of hands appear in the lower-right corner of the scene.", "type": "attribute", "weight": 3 }, { "content": "The person’s right hand picks up the pencil from the table, while their left hand presses the paper down with fingers pointing downward.", "type": "action", "weight": 3 }, { "content": "The person’s right hand begins writing from left to right on the paper.", "type": "action", "weight": 3 }, { "content": "The person’s right hand first writes the uppercase letter \"B.\"", "type": "action", "weight": 3 }, { "content": "The person’s right hand writes the uppercase letter \"A.\"", "type": "action", "weight": 3 }, { "content": "The person’s right hand writes the uppercase letter \"R.\"", "type": "action", "weight": 3 }, { "content": "The three letters form the word \"BAR.\"", "type": "attribute", "weight": 3 }, { "content": "The person lifts the paper with both hands and shows it towards the camera.", "type": "action", "weight": 3 }, { "content": "The person places the paper back on the table with both hands.", "type": "action", "weight": 3 } ] }, { "event": "The person’s left hand takes a blue \"B\" from underneath the table and places it on the table, while the right hand takes a red \"A\" from beneath the table and places it on the table. Then, the person’s right hand takes a blue \"R\" and places it on the table. After placing the letters, the person’s hands leave the scene.", "visual_elements": [ { "content": "The person’s left hand takes a blue \"B\" from beneath the table and places it on the table.", "type": "action", "weight": 3 }, { "content": "The person’s right hand takes a red \"A\" from beneath the table and places it on the table.", "type": "action", "weight": 3 }, { "content": "The person’s right hand takes a blue \"R\" and places it on the table.", "type": "action", "weight": 3 }, { "content": "The person’s hands leave the scene.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0460", "video_path": "PerceptionTest/video_1541.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 28.87, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is directed at a woman from the front, capturing the lower part of her face. She is wearing a yellow-green short-sleeve top with a necklace around her neck. She faces a stone table, on which, from left to right, there is a transparent jar with a red lid containing white powder, a fork, an egg, a transparent glass jar without a lid, and four fruits, including two dark green fruits, a coconut, and a dark red fruit. The background features a white wall with some papers with writing on them. On the left side of the scene, there is a light-colored door with a silver door handle.\nFirst, the woman picks up the egg with her right hand, gently taps it on the edge of the glass jar, then cracks the eggshell with both hands and pours the egg white and yolk into the glass jar. She places one half of the eggshell on top of the other half and then places it next to the fork. Next, she picks up the jar with the red lid with her right hand and pours white powder into the glass jar five times above the jar, then places the jar back on the table.\nThe woman picks up a dark green fruit with her left hand and hands it to her right hand. 
She places the dark green fruit on top of the jar with the red lid. Then, she picks up the fork with her right hand and stirs the mixture in the glass jar counterclockwise. Finally, the woman extends her left hand toward the camera, and the camera shakes slightly.", "events": [ { "event": "At the beginning of the video, the camera is directed at a woman from the front, capturing the lower part of her face. She is wearing a yellow-green short-sleeve top with a necklace around her neck. She faces a stone table, on which, from left to right, there is a transparent jar with a red lid containing white powder, a fork, an egg, a transparent glass jar without a lid, and four fruits, including two dark green fruits, a coconut, and a dark red fruit. The background features a white wall with some papers with writing on them. On the left side of the scene, there is a light-colored door with a silver door handle.", "visual_elements": [ { "content": "The camera is positioned facing a woman, capturing the area below her face.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a yellow-green short-sleeve top and a necklace around her neck.", "type": "attribute", "weight": 3 }, { "content": "The woman is facing a stone-topped table.", "type": "attribute", "weight": 3 }, { "content": "On the table, there is a transparent jar with white powder inside and a red lid.", "type": "attribute", "weight": 3 }, { "content": "On the table, there is a fork, an egg, and a transparent glass jar without a lid.", "type": "attribute", "weight": 3 }, { "content": "On the table, there is a dark green fruit.", "type": "attribute", "weight": 3 }, { "content": "On the table, there is a dark green fruit, a coconut, and a dark red fruit.", "type": "scene", "weight": 2 }, { "content": "The background is a white wall.", "type": "scene", "weight": 2 }, { "content": "There are papers with writing on them pasted on the wall.", "type": "scene", "weight": 2 }, { "content": "On the left side of the scene, there is a light-colored door with a silver doorknob.", "type": "scene", "weight": 2 } ] }, { "event": "First, the woman picks up the egg with her right hand, gently taps it on the edge of the glass jar, then cracks the eggshell with both hands and pours the egg white and yolk into the glass jar. She places one half of the eggshell on top of the other half and then places it next to the fork. Next, she picks up the jar with the red lid with her right hand and pours white powder into the glass jar five times above the jar, then places the jar back on the table.", "visual_elements": [ { "content": "The woman’s right hand picks up the egg and gently taps the edge of the glass jar.", "type": "action", "weight": 3 }, { "content": "The woman uses both hands to crack the eggshell and pours the egg white and yolk into the glass jar.", "type": "action", "weight": 3 }, { "content": "The woman’s right hand places one half of the eggshell on top of the other half.", "type": "action", "weight": 2 }, { "content": "The woman places the eggshell next to the fork.", "type": "action", "weight": 3 }, { "content": "The woman’s right hand picks up the jar with the red lid and pours white powder into the glass jar five times from above.", "type": "action", "weight": 3 }, { "content": "The woman places the jar back on the table.", "type": "action", "weight": 3 } ] }, { "event": "The woman picks up a dark green fruit with her left hand and hands it to her right hand. She places the dark green fruit on top of the jar with the red lid. 
Then, she picks up the fork with her right hand and stirs the mixture in the glass jar counterclockwise. Finally, the woman extends her left hand toward the camera, and the camera shakes slightly.", "visual_elements": [ { "content": "The woman’s left hand picks up a dark green fruit.", "type": "action", "weight": 3 }, { "content": "The woman’s left hand hands the dark green fruit to her right hand.", "type": "action", "weight": 2 }, { "content": "The woman’s right hand places the dark green fruit on top of the jar with the red lid.", "type": "action", "weight": 3 }, { "content": "The woman’s right hand picks up the fork and stirs the mixture in the glass jar counterclockwise.", "type": "action", "weight": 3 }, { "content": "The woman extends her left hand towards the camera.", "type": "action", "weight": 3 }, { "content": "The camera shakes slightly.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0461", "video_path": "PerceptionTest/video_155.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 11.43, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is positioned above a wooden table, slightly shaking. On the table, there is a small object, two inverted white plastic cups arranged in a straight line, and a book with a frog pattern on the cover. The frog-patterned book covers another, larger book beneath it. In the background, on the right side of the scene, there is an electronic display screen showing some menu options. A person sitting beside the table is wearing dark pants and holding a white plastic cup in their right hand.\nThe person slightly shakes the cup with their right hand, then turns the cup upside down on the table, covering the small object. Next, they pick up the second cup with their right hand and place it upside down on the table. Then, they pick up the third cup with their right hand and place it upside down on the table.\nNext, the person drags the frog-patterned book towards themselves with their right hand, picks it up, and then places it sideways, tapping the spine of the book on another book on the table. Then, the person holds the book with their left hand and flips through the pages with their right hand. The person places the book back on the table with their right hand. Finally, the person holds the first paper cup with their right hand.", "events": [ { "event": "At the beginning of the video, the camera is positioned above a wooden table, slightly shaking. On the table, there is a small object, two inverted white plastic cups arranged in a straight line, and a book with a frog pattern on the cover. The frog-patterned book covers another, larger book beneath it. In the background, on the right side of the scene, there is an electronic display screen showing some menu options. 
A person sitting beside the table is wearing dark pants and holding a white plastic cup in their right hand.", "visual_elements": [ { "content": "The camera is positioned above a wooden table and shakes slightly.", "type": "camera", "weight": 3 }, { "content": "On the table, there is a small item and two white plastic cups placed upside down in a straight line.", "type": "attribute", "weight": 3 }, { "content": "On the table, there is a book with a frog design on the cover.", "type": "attribute", "weight": 3 }, { "content": "The book with the frog design on the cover is covering another larger book underneath.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the scene, there is an electronic display screen showing some menu options.", "type": "scene", "weight": 2 }, { "content": "A person wearing dark pants is sitting beside the table.", "type": "attribute", "weight": 3 }, { "content": "The person’s right hand holds a white plastic cup.", "type": "attribute", "weight": 3 } ] }, { "event": "The person slightly shakes the cup with their right hand, then turns the cup upside down on the table, covering the small object. Next, they pick up the second cup with their right hand and place it upside down on the table. Then, they pick up the third cup with their right hand and place it upside down on the table.", "visual_elements": [ { "content": "The person’s right hand slightly shakes the cup.", "type": "action", "weight": 2 }, { "content": "The person’s right hand turns the cup upside down on the table, covering the small item.", "type": "action", "weight": 3 }, { "content": "The person’s right hand picks up the second cup and places it upside down on the table.", "type": "action", "weight": 3 }, { "content": "The person’s right hand picks up the third cup and places it upside down on the table again.", "type": "action", "weight": 3 } ] }, { "event": "Next, the person drags the frog-patterned book towards themselves with their right hand, picks it up, and then places it sideways, tapping the spine of the book on another book on the table. Then, the person holds the book with their left hand and flips through the pages with their right hand. The person places the book back on the table with their right hand. Finally, the person holds the first paper cup with their right hand.", "visual_elements": [ { "content": "The person’s right hand drags the book with the frog design towards their body.", "type": "action", "weight": 3 }, { "content": "The person’s right hand picks up the book with the frog design on the cover.", "type": "action", "weight": 3 }, { "content": "The person’s right hand places the book with the frog design on its side and taps the spine of the larger book on the table.", "type": "action", "weight": 3 }, { "content": "The person’s left hand holds the book with the frog design, and the right hand flips through the pages.", "type": "action", "weight": 3 }, { "content": "The person’s right hand places the book with the frog design back on the table.", "type": "action", "weight": 3 }, { "content": "The person’s right hand grips the first paper cup.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0462", "video_path": "PerceptionTest/video_1561.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 23.95, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is positioned sideways to a table. 
On the table, there is a yellow bowl, a seasoning bottle with white powder, a white egg, a banana, and a glass cup. There are also four blue uppercase letter paper pieces on the table, forming the word \"LIFE\". The background features a metal door and yellow walls.\nA woman is positioned on the left side of the scene, standing in front of the table, with her upper body not fully in the shot. She picks up a white egg from the table with her right hand and holds it above the yellow bowl. She gently taps the egg on the edge of the bowl with her right hand, then uses both hands to break the egg, letting the egg white and yolk fall into the bowl. She places the eggshell beside the bowl with her right hand.\nThe woman then places the letter \"I\" in front of \"L\", then places the letter \"F\" after \"E\". She moves the letter \"L\" in front of \"I\", and finally places \"F\" in front of \"E\". The four letters move around but ultimately still form the word \"LIFE\". She then drags the four letters to the left side of the scene.\nThe woman picks up the seasoning bottle with her right hand while holding the bottom of the bottle with her left hand. She opens the bottle's cap with her right hand and pours some white powder into the bowl. She passes the seasoning bottle to her left hand, and her left hand places the bottle back on the table. The woman then picks up a fork with her right hand, presses the edge of the bowl with her left hand, and stirs the mixture in the bowl with her right hand. After stirring, she taps the edge of the bowl a few times with the fork, then places the fork upright on the table. Her left hand rests on the table.", "events": [ { "event": "At the beginning of the video, the camera is positioned sideways to a table. On the table, there is a yellow bowl, a seasoning bottle with white powder, a white egg, a banana, and a glass cup. There are also four blue uppercase letter paper pieces on the table, forming the word \"LIFE\". The background features a metal door and yellow walls.", "visual_elements": [ { "content": "The camera is positioned sideways, focusing on a table.", "type": "camera", "weight": 3 }, { "content": "On the table, there is a yellow bowl, a condiment bottle containing white powder, and a white egg.", "type": "attribute", "weight": 3 }, { "content": "On the table, there is a banana and a glass cup.", "type": "scene", "weight": 2 }, { "content": "There are four blue capital letter paper pieces on the table.", "type": "attribute", "weight": 3 }, { "content": "The four letter pieces form the English word \"LIFE\".", "type": "attribute", "weight": 3 }, { "content": "The background consists of a metal door and yellow walls.", "type": "scene", "weight": 2 } ] }, { "event": "A woman is positioned on the left side of the scene, standing in front of the table, with her upper body not fully in the shot. She picks up a white egg from the table with her right hand and holds it above the yellow bowl. She gently taps the egg on the edge of the bowl with her right hand, then uses both hands to break the egg, letting the egg white and yolk fall into the bowl. 
She places the eggshell beside the bowl with her right hand.", "visual_elements": [ { "content": "A woman is positioned on the left side of the scene, standing in front of the table.", "type": "attribute", "weight": 3 }, { "content": "The woman’s upper body is not fully in the shot.", "type": "attribute", "weight": 3 }, { "content": "The woman’s right hand picks up a white egg from the table and stops above the yellow bowl.", "type": "action", "weight": 3 }, { "content": "The woman’s right hand gently taps the edge of the bowl with the egg.", "type": "action", "weight": 3 }, { "content": "The woman uses both hands to crack the egg, letting the egg white and yolk fall into the bowl.", "type": "action", "weight": 3 }, { "content": "The woman’s right hand places the eggshell beside the bowl.", "type": "action", "weight": 3 } ] }, { "event": "The woman then places the letter \"I\" in front of \"L\", then places the letter \"F\" after \"E\". She moves the letter \"L\" in front of \"I\", and finally places \"F\" in front of \"E\". The four letters move around but ultimately still form the word \"LIFE\". She then drags the four letters to the left side of the scene.", "visual_elements": [ { "content": "The woman places the letter \"I\" in front of the \"L\".", "type": "action", "weight": 3 }, { "content": "The woman places the \"F\" after the \"E\".", "type": "action", "weight": 3 }, { "content": "The woman places the \"L\" in front of the \"I\".", "type": "action", "weight": 3 }, { "content": "The woman places the \"F\" in front of the \"E\".", "type": "action", "weight": 3 }, { "content": "The four letters move around but ultimately still form the word \"LIFE\".", "type": "attribute", "weight": 2 }, { "content": "The woman drags the four letters to the left side of the scene.", "type": "action", "weight": 3 } ] }, { "event": "The woman picks up the seasoning bottle with her right hand while holding the bottom of the bottle with her left hand. She opens the bottle's cap with her right hand and pours some white powder into the bowl. She passes the seasoning bottle to her left hand, and her left hand places the bottle back on the table. The woman then picks up a fork with her right hand, presses the edge of the bowl with her left hand, and stirs the mixture in the bowl with her right hand. After stirring, she taps the edge of the bowl a few times with the fork, then places the fork upright on the table. 
Her left hand rests on the table.", "visual_elements": [ { "content": "The woman’s right hand picks up the condiment bottle while her left hand grips the bottom of it.", "type": "action", "weight": 3 }, { "content": "The woman’s right hand opens the condiment bottle lid and pours some white powder into the bowl.", "type": "action", "weight": 3 }, { "content": "The woman hands the condiment bottle to her left hand.", "type": "action", "weight": 2 }, { "content": "The woman’s left hand places the condiment bottle back on the table.", "type": "action", "weight": 3 }, { "content": "The woman’s right hand picks up a fork.", "type": "action", "weight": 2 }, { "content": "The woman’s left hand presses down on the edge of the bowl.", "type": "action", "weight": 2 }, { "content": "The woman’s right hand stirs the mixture in the bowl.", "type": "action", "weight": 3 }, { "content": "The woman’s right hand taps the edge of the bowl several times with the fork.", "type": "action", "weight": 3 }, { "content": "The woman’s right hand places the fork facing up on the table.", "type": "action", "weight": 3 }, { "content": "The woman’s left hand is placed on the table.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 28 }, { "index": "TUNA_0463", "video_path": "PerceptionTest/video_1579.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 22.33, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused directly on a black tabletop with wooden legs. On the table, there is a white sheet of paper, a pink pen, and a pink cup. The pen is placed on top of the paper. The background is a piece of fabric with red and blue patterns.\nThe camera shakes, and a hand enters from the right side of the scene, gripping the pen while rotating both the pen and paper. The hand then picks up the pen and writes the letters P, I, and G on the white paper, forming the English word \"PIG\".\nAfter writing, the hand picks up the paper and moves it towards the camera. The camera shakes back and forth. Then, the hand places the paper back on the table and sets the pen on the paper.\nThe camera shakes. The hand takes a white letter \"P\" from the right side of the scene and places it on the table. The hand then takes a white letter \"I\" and places it next to the letter \"P\" on the table, followed by the letter \"G\" next to the letter \"I\". Finally, the hand disappears from the scene. The camera shakes slightly left and right.", "events": [ { "event": "At the beginning of the video, the camera is focused directly on a black tabletop with wooden legs. On the table, there is a white sheet of paper, a pink pen, and a pink cup. The pen is placed on top of the paper. The background is a piece of fabric with red and blue patterns.", "visual_elements": [ { "content": "The camera is facing directly toward a black tabletop with wooden legs.", "type": "camera", "weight": 3 }, { "content": "On the table, there is a white piece of paper and a pink pen.", "type": "attribute", "weight": 3 }, { "content": "There is a pink cup on the table.", "type": "scene", "weight": 2 }, { "content": "The pen is on top of the paper.", "type": "attribute", "weight": 3 }, { "content": "The background is a fabric with red and blue patterns.", "type": "scene", "weight": 2 } ] }, { "event": "The camera shakes, and a hand enters from the right side of the scene, gripping the pen while rotating both the pen and paper. 
The hand then picks up the pen and writes the letters P, I, and G on the white paper, forming the English word \"PIG\".", "visual_elements": [ { "content": "The camera shakes.", "type": "camera", "weight": 3 }, { "content": "A hand enters from the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The hand grasps the pen and simultaneously rotates the pen and paper.", "type": "action", "weight": 3 }, { "content": "The hand picks up the pen and writes the letters \"P\", \"I\", and \"G\" in order on the white paper.", "type": "action", "weight": 3 }, { "content": "The three letters form the English word \"PIG\".", "type": "attribute", "weight": 3 } ] }, { "event": "After writing, the hand picks up the paper and moves it towards the camera. The camera shakes back and forth. Then, the hand places the paper back on the table and sets the pen on the paper.", "visual_elements": [ { "content": "The hand lifts the paper and moves it toward the camera.", "type": "action", "weight": 3 }, { "content": "The camera shakes back and forth.", "type": "camera", "weight": 3 }, { "content": "The hand places the paper back on the table.", "type": "action", "weight": 3 }, { "content": "The hand places the pen on the paper.", "type": "action", "weight": 3 } ] }, { "event": "The camera shakes. The hand takes a white letter \"P\" from the right side of the scene and places it on the table. The hand then takes a white letter \"I\" and places it next to the letter \"P\" on the table, followed by the letter \"G\" next to the letter \"I\". Finally, the hand disappears from the scene. The camera shakes slightly left and right.", "visual_elements": [ { "content": "The camera shakes.", "type": "camera", "weight": 3 }, { "content": "The hand takes a white letter \"P\" from the right side of the scene and places it on the table.", "type": "action", "weight": 3 }, { "content": "The hand takes a white letter \"I\" and places it next to the letter \"P\" on the table.", "type": "action", "weight": 3 }, { "content": "The hand takes a white letter \"G\" and places it next to the letter \"I\" on the table.", "type": "action", "weight": 3 }, { "content": "The hand disappears from the scene.", "type": "action", "weight": 3 }, { "content": "The camera shakes slightly left and right.", "type": "camera", "weight": 3 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_0464", "video_path": "PerceptionTest/video_1703.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 26.9, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is pointed at a granite table. On the table, from left to right, there is a blue pen, a piece of white paper and a transparent glass. A woman wearing a dark blue short-sleeved shirt sits in front of the granite table. The background is a white tile wall.\nThe woman places her hands on the white paper, then picks up a blue pen and begins to write on the white paper. The woman first draws a capital letter \"P\", then the letter \"I\", and finally the letter \"G\", spelling the word \"PIG\". After writing, the woman raises the paper and shows it to the camera. Then, the woman puts the paper back on the table.\nThen, the camera moves slightly to the right. The woman first extends her right hand to the left of the screen. 
Then, she uses her right hand to place the blue foam letter blocks \"T\", \"V\" and \"C\" in a row on the table.\nAt the end of the video, the woman reaches out her right hand towards the camera to turn off the camera.", "events": [ { "event": "At the beginning of the video, the camera is pointed at a granite table. On the table, from left to right, there is a blue pen, a piece of white paper and a transparent glass. A woman wearing a dark blue short-sleeved shirt sits in front of the granite table. The background is a white tile wall.", "visual_elements": [ { "content": "The camera is pointed at a granite table.", "type": "camera", "weight": 3 }, { "content": "On the table, from left to right, there is a blue pen, a white paper and a transparent glass cup.", "type": "attribute", "weight": 3 }, { "content": "A woman wearing a dark blue short-sleeved shirt is sitting in front of the granite table.", "type": "attribute", "weight": 3 }, { "content": "The background is a white tile wall.", "type": "scene", "weight": 3 } ] }, { "event": "The woman places her hands on the white paper, then picks up a blue pen and begins to write on the white paper. The woman first draws a capital letter \"P\", then the letter \"I\", and finally the letter \"G\", spelling the word \"PIG\". After writing, the woman raises the paper and shows it to the camera. Then, the woman puts the paper back on the table.", "visual_elements": [ { "content": "The woman picks up a blue pen and begins to write on the white paper.", "type": "action", "weight": 3 }, { "content": "The woman first draws a capital letter \"P\", then the letter \"I\", and finally the letter \"G\".", "type": "action", "weight": 3 }, { "content": "After writing, the woman raises the paper and shows it to the camera.", "type": "action", "weight": 2 }, { "content": "Then, the woman puts the paper back on the table.", "type": "action", "weight": 1 } ] }, { "event": "Then, the camera moves slightly to the right. The woman first extends her right hand to the left of the screen. Then, she uses her right hand to place the blue foam letter blocks \"T\", \"V\" and \"C\" in a row on the table.", "visual_elements": [ { "content": "Then, the camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The woman first extends her right hand to the left side of the screen.", "type": "action", "weight": 2 }, { "content": "Then, the woman uses her right hand to arrange the blue foam letter blocks \"T\", \"V\" and \"C\" on the table in order.", "type": "action", "weight": 3 } ] }, { "event": "At the end of the video, the woman reaches out her right hand towards the camera to turn off the camera.", "visual_elements": [ { "content": "At the end of the video, the woman extends her right hand to the camera to turn off the camera.", "type": "action", "weight": 1 } ] } ], "n_events": 4, "n_elements": 12 }, { "index": "TUNA_0465", "video_path": "PerceptionTest/video_1730.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 9.19, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a pair of hairy hands are standing a book on a wooden table. There are some words and a barcode on the back cover of the book, and a yellow price tag is attached to the lower right corner. The background is a living room. From the right to the left of the screen, there are a blue office chair, a TV, a small Christmas tree with decorations and an open white door. 
There are clothes hanging messily on a white sofa in front of the door.\nThe left hand holds the back of the book, and the right hand holds a metal spoon to support the top of the book. The hands slowly place the spoon on the top of the book, with the handle of the spoon facing the right side of the screen. Finally, the hands take out an orange pen from the top of the screen and display it horizontally in front of the camera.", "events": [ { "event": "At the beginning of the video, a pair of hairy hands are standing a book on a wooden table. There are some words and a barcode on the back cover of the book, and a yellow price tag is attached to the lower right corner. The background is a living room. From the right to the left of the screen, there are a blue office chair, a TV, a small Christmas tree with decorations and an open white door. There are clothes hanging messily on a white sofa in front of the door.", "visual_elements": [ { "content": "In the picture, a pair of hairy hands are holding a book on a wooden table.", "type": "action", "weight": 3 }, { "content": "There are some words and barcodes on the back cover of the book.", "type": "attribute", "weight": 1 }, { "content": "The background is a living room.", "type": "scene", "weight": 3 }, { "content": "From right to left, there are a blue office chair, a TV, a small Christmas tree with decorations and an open white door.", "type": "scene", "weight": 2 }, { "content": "There are clothes hanging messily on a white sofa in front of the door.", "type": "scene", "weight": 1 } ] }, { "event": "The left hand holds the back of the book, and the right hand holds a metal spoon to support the top of the book. The hands slowly place the spoon on the top of the book, with the handle of the spoon facing the right side of the screen. Finally, the hands take out an orange pen from the top of the screen and display it horizontally in front of the camera.", "visual_elements": [ { "content": "The left hand holds the back of the book, and the right hand holds a metal spoon to support the top of the book.", "type": "action", "weight": 3 }, { "content": "The hands slowly put the spoon on the top of the book.", "type": "action", "weight": 3 }, { "content": "Finally, the hands take out an orange pen from the top of the picture and put it horizontally in front of the camera.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0466", "video_path": "PerceptionTest/video_1740.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 31.02, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera pointing at a dark brown wooden table. In the foreground, from left to right, are a colorful plate, a glass, and a carton of milk in a green package. On the upper right side of the table is a transparent salt shaker with a blue lid. In front of the left side of the salt shaker is a blue-patterned bowl with three eggs. Directly above the table is a white bowl and a fork. On the upper left side of the table are three colorful letter magnets. The three letter magnets are a green \"X\", a yellow \"O\", and a blue \"B\". A person wearing a black and white T-shirt and blue jeans sits behind the table. The person holds the sides of the table. The background is a white tiled floor. 
There are some stuffed toys on the floor on the left side of the picture.\nFirst, the person rearranges the letter tiles, slanting from left to right to form the word \"BOX\". Next, the person picks up an egg with their right hand and taps it gently on the edge of the colorful patterned plate. Then, the person cracks the egg into an empty bowl with both hands and places the eggshell on the table. Then, the person picks up the salt shaker with their right hand and sprinkles some salt into the egg liquid in the bowl. Then, the person stirs the egg liquid in the bowl with a fork in their right hand and holds the edge of the bowl with their left hand. While stirring, the left hand slightly lifts the bowl. The egg yolk and egg white gradually mix evenly to form a yellow egg liquid. After stirring, the person puts the fork back on the table with their right hand.\nAt the end of the video, this person extends their right hand to the left of the screen to turn off the camera.", "events": [ { "event": "The video begins with the camera pointing at a dark brown wooden table. In the foreground, from left to right, are a colorful plate, a glass, and a carton of milk in a green package. On the upper right side of the table is a transparent salt shaker with a blue lid. In front of the left side of the salt shaker is a blue-patterned bowl with three eggs. Directly above the table is a white bowl and a fork. On the upper left side of the table are three colorful letter magnets. The three letter magnets are a green \"X\", a yellow \"O\", and a blue \"B\". A person wearing a black and white T-shirt and blue jeans sits behind the table. The person holds the sides of the table. The background is a white tiled floor. There are some stuffed toys on the floor on the left side of the picture.", "visual_elements": [ { "content": "The camera is pointed at a dark brown wooden table.", "type": "camera", "weight": 3 }, { "content": "In the foreground of the camera, from left to right, there is a colorful plate, a glass, and a box of milk in green packaging.", "type": "attribute", "weight": 3 }, { "content": "On the upper right of the table is a transparent salt shaker with a blue lid.", "type": "attribute", "weight": 2 }, { "content": "In front of the left side of the salt shaker is a blue-patterned bowl with three eggs.", "type": "attribute", "weight": 2 }, { "content": "Directly above the table is a white bowl and a fork.", "type": "attribute", "weight": 2 }, { "content": "On the upper left of the table are three colorful letter magnets.", "type": "attribute", "weight": 2 }, { "content": "The three letter magnets are a green \"X\", a yellow \"O\", and a blue \"B\".", "type": "attribute", "weight": 1 }, { "content": "A person wearing a black and white T-shirt and blue jeans sits behind the table.", "type": "attribute", "weight": 3 }, { "content": "The person holds the sides of the table with both hands.", "type": "action", "weight": 3 }, { "content": "The background is a white tiled floor.", "type": "scene", "weight": 2 }, { "content": "There are some stuffed toys on the floor on the left side of the picture.", "type": "scene", "weight": 1 } ] }, { "event": "First, the person rearranges the letter tiles, slanting from left to right to form the word \"BOX\". Next, the person picks up an egg with their right hand and taps it gently on the edge of the colorful patterned plate. Then, the person cracks the egg into an empty bowl with both hands and places the eggshell on the table. Then, the person picks up the salt shaker with their right hand and sprinkles some salt into the egg liquid in the bowl. 
Then, the person stirs the egg liquid in the bowl with a fork in their right hand and holds the edge of the bowl with their left hand. While stirring, the left hand slightly lifts the bowl. The egg yolk and egg white gradually mix evenly to form a yellow egg liquid. After stirring, the person puts the fork back on the table with their right hand.", "visual_elements": [ { "content": "First, the person rearranges the letter tiles, slanting from left to right to form the word \"BOX\".", "type": "action", "weight": 2 }, { "content": "Next, the right hand picks up an egg and taps it gently on the edge of the colorful patterned plate.", "type": "action", "weight": 3 }, { "content": "Then both hands crack the egg into an empty bowl and place the eggshell on the table.", "type": "action", "weight": 3 }, { "content": "Then, the right hand picks up the salt shaker and sprinkles some salt into the egg liquid in the bowl.", "type": "action", "weight": 3 }, { "content": "Then, the right hand stirs the egg liquid with a fork, and the left hand holds the edge of the bowl.", "type": "action", "weight": 3 }, { "content": "During the stirring process, the left hand lifts the bowl slightly.", "type": "attribute", "weight": 1 }, { "content": "The egg yolk and egg white gradually mix evenly to form a yellow egg liquid.", "type": "action", "weight": 1 }, { "content": "After stirring, the right hand puts the fork back on the table.", "type": "action", "weight": 2 } ] }, { "event": "At the end of the video, this person extends their right hand to the left of the screen to turn off the camera.", "visual_elements": [ { "content": "At the end of the video, the right hand extends to the left of the screen to turn off the camera.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0467", "video_path": "PerceptionTest/video_1780.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 21.82, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focused on a white table. On the table is a black remote control and a white phone. In the background is a modern living room. The brown wooden floor of the living room reflects the light. There is a light-colored carpet with an intricate pattern at the bottom right of the white table. There is an area decorated with white stones at the top right of the screen.\nThen, the camera shakes slightly. A black and rose-red bag appears from the left side of the screen and is placed on a white table. A hand in orange clothes lowers the bag's strap and moves away from the screen. Then, the hand places a white charging cable next to the bag.\nThe hand then shows a book with an orange, white and blue cover in front of the camera. There is some black and white text on the cover of the book. Finally, the hand places the book on the far left side of the white table.", "events": [ { "event": "The video begins with the camera focused on a white table. On the table is a black remote control and a white phone. In the background is a modern living room. The brown wooden floor of the living room reflects the light. There is a light-colored carpet with an intricate pattern at the bottom right of the white table. 
There is an area decorated with white stones at the top right of the screen.", "visual_elements": [ { "content": "The camera is pointed at a white table.", "type": "camera", "weight": 3 }, { "content": "A black remote control and a white mobile phone are placed on the table.", "type": "attribute", "weight": 2 }, { "content": "A modern living room is in the background.", "type": "scene", "weight": 3 }, { "content": "The brown wooden floor of the living room reflects the light.", "type": "attribute", "weight": 1 }, { "content": "There is a light-colored carpet with an intricate pattern at the bottom right of the white table.", "type": "scene", "weight": 2 }, { "content": "There is an area decorated with white stones at the top right of the picture.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera shakes slightly. A black and rose-red bag appears from the left side of the screen and is placed on a white table. A hand in orange clothes lowers the bag's strap and moves away from the screen. Then, the hand places a white charging cable next to the bag.", "visual_elements": [ { "content": "Then, the camera shakes slightly.", "type": "camera", "weight": 3 }, { "content": "A black and rose-red bag appears from the left side of the screen and is placed on a white table.", "type": "attribute", "weight": 3 }, { "content": "A hand in orange clothes lowers the bag's strap and moves away from the screen.", "type": "action", "weight": 3 }, { "content": "Then, the hand places a white charging cable next to the bag.", "type": "action", "weight": 3 } ] }, { "event": "The hand then shows a book with an orange, white and blue cover in front of the camera. There is some black and white text on the cover of the book. Finally, the hand places the book on the far left side of the white table.", "visual_elements": [ { "content": "Then, the hand shows a book with an orange, white and blue cover in front of the camera.", "type": "action", "weight": 3 }, { "content": "There are some black and white words on the cover of the book.", "type": "attribute", "weight": 1 }, { "content": "Finally, the hand places the book on the far left of the white table.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0468", "video_path": "PerceptionTest/video_1835.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 34.99, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a wooden table. There is a stainless steel kettle on the left side of the table. There is a white patterned plate with two slices of bread on the front right side of the table. There is an orange tea bag box on the left side of the plate. There is some food in front of the left side of the plate, including two bananas, two tomatoes and two eggs. There is a white patterned water cup and a spoon on the left side of the food. There is a stainless steel can and a red plastic bowl full of white solid particles on the right side of the food. In the background, there is a sofa behind the table. The green cushions of the sofa are covered with white cloth.\nThen, a black man in blue pants appears from the right side of the screen. The man sits on the sofa. The man opens the tea bag box with both hands, takes out a tea bag from it, and then puts the tea bag into the patterned cup with his right hand. 
Then, the man opens the lid of the stainless steel pot with his left hand, puts a tomato from the table into the pot with his right hand, and picks up the kettle with his right hand to pour water into the cup. Then, the man puts down the kettle, picks up the spoon with his right hand and gently stirs the tea in the cup. Then, the man scoops a spoonful of white particles from the stainless steel can with his right hand, adds it to the tea, and stirs it again. Finally, the man puts the spoon back in its original place.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wooden table. There is a stainless steel kettle on the left side of the table. There is a white patterned plate with two slices of bread on the front right side of the table. There is an orange tea bag box on the left side of the plate. There is some food in front of the left side of the plate, including two bananas, two tomatoes and two eggs. There is a white patterned water cup and a spoon on the left side of the food. There is a stainless steel can and a red plastic bowl full of white solid particles on the right side of the food. In the background, there is a sofa behind the table. The green cushions of the sofa are covered with white cloth.", "visual_elements": [ { "content": "The camera is pointed at a wooden table.", "type": "camera", "weight": 3 }, { "content": "There is a stainless steel kettle on the left side of the table.", "type": "attribute", "weight": 2 }, { "content": "There is a white patterned plate with two slices of bread on the front right side of the table.", "type": "attribute", "weight": 2 }, { "content": "There is an orange tea bag box on the left side of the plate.", "type": "attribute", "weight": 2 }, { "content": "There is some food in front of the left side of the plate, including two bananas, two tomatoes and two eggs.", "type": "attribute", "weight": 2 }, { "content": "There is a patterned white water cup and a spoon on the left side of the food.", "type": "attribute", "weight": 2 }, { "content": "There is a stainless steel can and a red plastic bowl full of white solid particles on the right side of the food.", "type": "attribute", "weight": 2 }, { "content": "There is a sofa behind the table in the background.", "type": "scene", "weight": 2 }, { "content": "The green cushions of the sofa are covered with white cloth.", "type": "attribute", "weight": 1 } ] }, { "event": "Then, a black man in blue pants appears from the right side of the screen. The man sits on the sofa. The man opens the tea bag box with both hands, takes out a tea bag from it, and then puts the tea bag into the patterned cup with his right hand. Then, the man opens the lid of the stainless steel pot with his left hand, puts a tomato from the table into the pot with his right hand, and picks up the kettle with his right hand to pour water into the cup. Then, the man puts down the kettle, picks up the spoon with his right hand and gently stirs the tea in the cup. Then, the man scoops a spoonful of white particles from the stainless steel can with his right hand, adds it to the tea, and stirs it again. 
Finally, the man puts the spoon back in its original place.", "visual_elements": [ { "content": "Then, a black man in blue pants appears from the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The man sits on the sofa.", "type": "action", "weight": 3 }, { "content": "The man opens the tea bag box with both hands and takes out a tea bag from it.", "type": "action", "weight": 3 }, { "content": "Then the man puts the tea bag into the patterned cup with his right hand.", "type": "action", "weight": 3 }, { "content": "Then, the man opens the lid of the stainless steel pot with his left hand and puts a tomato from the table into the pot with his right hand.", "type": "action", "weight": 3 }, { "content": "The man picks up the kettle with his right hand and pours water into the cup.", "type": "action", "weight": 3 }, { "content": "Then, the man puts down the kettle and picks up the spoon with his right hand to gently stir the tea in the cup.", "type": "action", "weight": 3 }, { "content": "Then, the man scoops a spoonful of white particles from the stainless steel can with his right hand, adds it to the tea, and stirs it again.", "type": "action", "weight": 3 }, { "content": "Finally, the man puts the spoon back in its original place.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 18 }, { "index": "TUNA_0469", "video_path": "PerceptionTest/video_1888.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 35.04, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focused on a kitchen. On the right side of the screen is a transparent glass table. On the table are some items, including a curved table lamp, chargers, and some small tools. On the right side of the screen is a white wall with sockets and white windows. In the background, the kitchen's background wall is covered with gray and white tiles. The cabinets on the background wall are light wood. On the counter are some washing supplies and tableware. On the left side of the screen is a courtyard with trees. Outside the courtyard is a piece of red cloth.\nA middle-aged Asian man enters the picture from the right side and stands behind the glass table. The man is wearing a purple T-shirt and trousers, and a watch on his wrist. The man picks up a pen with his left hand, then switches the pen to his right hand and taps the table, then switches the pen to his left hand and picks up a knife with his right hand and spins it on the table. Then, the man puts the pen and knife down. The man picks up the charger and plugs it into the wall socket. Then, the man picks up the pen on the table with his left hand and taps the table again. Then, the man spins the knife on the table with his right hand again. Finally, the man presses the button of the desk lamp on the table to turn on the light.\nThe video ends with the man walking out of the frame to the right.", "events": [ { "event": "The video begins with the camera focused on a kitchen. On the right side of the screen is a transparent glass table. On the table are some items, including a curved table lamp, chargers, and some small tools. On the right side of the screen is a white wall with sockets and white windows. In the background, the kitchen's background wall is covered with gray and white tiles. The cabinets on the background wall are light wood. On the counter are some washing supplies and tableware. 
On the left side of the screen is a courtyard with trees. Outside the courtyard is a piece of red cloth.", "visual_elements": [ { "content": "The camera is pointed at a kitchen.", "type": "camera", "weight": 3 }, { "content": "There is a transparent glass table on the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "There are some items on the table, including a curved table lamp, a charger and some small tools.", "type": "attribute", "weight": 1 }, { "content": "There are sockets and white windows on the white wall on the right side of the screen.", "type": "attribute", "weight": 2 }, { "content": "In the background, the kitchen background wall is covered with gray and white tiles.", "type": "scene", "weight": 1 }, { "content": "The cabinets on the background wall are light wood.", "type": "scene", "weight": 2 }, { "content": "There are some washing supplies and tableware on the counter.", "type": "scene", "weight": 1 }, { "content": "There is a courtyard with trees on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "There is a piece of red cloth hanging outside the courtyard.", "type": "scene", "weight": 1 } ] }, { "event": "A middle-aged Asian man enters the picture from the right side and stands behind the glass table. The man is wearing a purple T-shirt and trousers, and a watch on his wrist. The man picks up a pen with his left hand, then switches the pen to his right hand and taps the table, then switches the pen to his left hand and picks up a knife with his right hand and spins it on the table. Then, the man puts the pen and knife down. The man picks up the charger and plugs it into the wall socket. Then, the man picks up the pen on the table with his left hand and taps the table again. Then, the man spins the knife on the table with his right hand again. 
Finally, the man presses the button of the desk lamp on the table to turn on the light.", "visual_elements": [ { "content": "A middle-aged Asian man enters the screen from the right side and stands behind the glass table.", "type": "action", "weight": 3 }, { "content": "The man wears a purple T-shirt and trousers and a watch on his wrist.", "type": "attribute", "weight": 3 }, { "content": "The man picks up a pen with his left hand, then switches the pen to his right hand and taps the table.", "type": "action", "weight": 3 }, { "content": "Then, the man takes the pen with his left hand and picks up a knife with his right hand and spins it on the table.", "type": "action", "weight": 3 }, { "content": "Then, the man puts the pen and knife down.", "type": "action", "weight": 2 }, { "content": "The man picks up the charger and plugs it into the wall socket.", "type": "action", "weight": 3 }, { "content": "Then, the man picks up the pen on the table with his left hand and taps the table again.", "type": "action", "weight": 3 }, { "content": "Then, the man puts down the pen with his left hand and spins the knife on the table with his right hand again.", "type": "action", "weight": 2 }, { "content": "Finally, the man puts down the knife and presses the button on the table lamp to turn on the light.", "type": "action", "weight": 3 } ] }, { "event": "The video ends with the man walking out of the frame to the right.", "visual_elements": [ { "content": "At the end of the video, the man walks out of the screen to the right side of the screen.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0470", "video_path": "PerceptionTest/video_1903.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 18.42, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is pointed at a table covered with a blue and white checkered tablecloth. On the left side of the table is an open cardboard box, and on the right side of the table are several dolls of different colors, a black water cup and a black remote control. In the background, a fruit chart, sockets and some electrical equipment are hung on the white wall. On the right side of the screen is a khaki door. A black woman wearing a purple patterned dress and a gold bracelet on her arm appears on the right side of the screen.\nThe woman first puts a white bunny doll into the carton. Then, she puts a gray plush toy, a yellow and black plush toy, a small object wrapped in orange and white plastic, a black remote control, an orange-wrapped tissue, and a black cylindrical object into the carton. Finally, the woman closes the lid of the carton and presses it with her hand.", "events": [ { "event": "At the beginning of the video, the camera is pointed at a table covered with a blue and white checkered tablecloth. On the left side of the table is an open cardboard box, and on the right side of the table are several dolls of different colors, a black water cup and a black remote control. In the background, a fruit chart, sockets and some electrical equipment are hung on the white wall. On the right side of the screen is a khaki door. 
A black woman wearing a purple patterned dress and a gold bracelet on her arm appears on the right side of the screen.", "visual_elements": [ { "content": "The camera is pointed at a table covered with a blue and white checkered tablecloth.", "type": "camera", "weight": 3 }, { "content": "On the left side of the table is an open cardboard box.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the table are several dolls of different colors, a black water cup and a black remote control.", "type": "attribute", "weight": 2 }, { "content": "In the background, a fruit chart, sockets and some electrical equipment are hung on the white wall.", "type": "scene", "weight": 3 }, { "content": "On the right side of the screen is a khaki door.", "type": "scene", "weight": 1 }, { "content": "A black woman wearing a purple patterned dress and a gold bracelet on her arm appears on the right side of the screen.", "type": "attribute", "weight": 2 } ] }, { "event": "The woman first puts a white bunny doll into the carton. Then, she puts a gray plush toy, a yellow and black plush toy, a small object wrapped in orange and white plastic, a black remote control, an orange-wrapped tissue, and a black cylindrical object into the carton. Finally, the woman closes the lid of the carton and presses it with her hand.", "visual_elements": [ { "content": "The woman first puts a white bunny doll into the carton.", "type": "action", "weight": 3 }, { "content": "Then, she puts a gray plush toy, a yellow and black plush toy, a small object wrapped in orange and white plastic, a black remote control, an orange-wrapped tissue, and a black cylindrical object into the carton.", "type": "action", "weight": 3 }, { "content": "Finally, the woman closes the lid of the carton and presses it with her hand.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0471", "video_path": "PerceptionTest/video_1997.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 25.56, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a person sitting in a chair. The person is wearing a blue short-sleeved shirt with white patterns, a black and white shirt with pink floral patterns underneath, and black shorts. The person has his hands on his legs, and a pink towel is spread on the table in front of him, on which is a grater, a spoon, and a colander. The camera is fixed on the person's upper body and thighs. The scene is in a simple room with brown wooden floors. There is a blue and white electric fan and an old shelf full of miscellaneous items on the left side of the screen.\nIn the video, the man begins to adjust his shirt with both hands, and then starts to button the shirt one by one from top to bottom.\nThe video ends with the man placing his hands back on his legs before extending his right hand to the left of the frame and turning off the camera.", "events": [ { "event": "At the beginning of the video, the camera is focused on a person sitting in a chair. The person is wearing a blue short-sleeved shirt with white patterns, a black and white shirt with pink floral patterns underneath, and black shorts. The person has his hands on his legs, and a pink towel is spread on the table in front of him, on which is a grater, a spoon, and a colander. The camera is fixed on the person's upper body and thighs. The scene is in a simple room with brown wooden floors. 
There is a blue and white electric fan and an old shelf full of miscellaneous items on the left side of the screen.", "visual_elements": [ { "content": "The camera is focused on a person sitting on a chair.", "type": "camera", "weight": 3 }, { "content": "The person is wearing a blue short-sleeved shirt with white patterns, a black and white shirt with pink floral patterns underneath, and black shorts.", "type": "attribute", "weight": 3 }, { "content": "The person has his hands on his legs.", "type": "action", "weight": 3 }, { "content": "A pink towel is spread on the table in front of the person, on which is placed a grater, a spoon, and a colander.", "type": "attribute", "weight": 2 }, { "content": "The camera is fixed on the person's upper body and thighs.", "type": "camera", "weight": 3 }, { "content": "The scene is in a simple room with brown wooden floors.", "type": "scene", "weight": 3 }, { "content": "There is a blue and white electric fan and an old shelf full of miscellaneous items on the left side of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "In the video, the man begins to adjust his shirt with both hands, and then starts to button the shirt one by one from top to bottom.", "visual_elements": [ { "content": "In the video, the person begins to adjust his shirt with both hands.", "type": "action", "weight": 2 }, { "content": "Then, the person begins to button up his shirt one by one from top to bottom.", "type": "action", "weight": 3 } ] }, { "event": "The video ends with the man placing his hands back on his legs before extending his right hand to the left of the frame and turning off the camera.", "visual_elements": [ { "content": "At the end of the video, the person puts his hands back on his legs.", "type": "action", "weight": 1 }, { "content": "Then, the person extends his right hand to the left side of the screen and turns off the camera.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0472", "video_path": "PerceptionTest/video_2.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 31.33, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera pointing at a half-open brown wooden door. Outside the door is a floor covered with brown and white tiles with geometric patterns. On the right side of the screen, there is an old slipper on the tiles outside the door. The floor inside the door is covered with white tiles. On the right side of the screen, there is a piece of furniture covered with a dark gray cloth with yellow lines.\nThe camera keeps a low angle. A person's leg appears from the right side of the screen. The person's legs are bare, and the toes are painted with purple nail polish. The person bends down and places a white piece of paper at the door. Then the person slowly closes the door, and the paper is squeezed onto the tiles outside the door. The person picks up the paper and disappears to the right side of the screen.\nThen, the person's legs appear from the right side of the screen again. The person bends down again and places a white plastic bag at the door. Then the person closes the door again, and the plastic bag is squeezed out of the door. Then, the person picks up the plastic bag and legs disappear from the right side of the screen. Then, the person's legs appear again. The person bends down and places a blue towel at the door. 
Then the person closes the door again, and the towel is squeezed out of the door. Finally, the person leaves the screen again.", "events": [ { "event": "The video begins with the camera pointing at a half-open brown wooden door. Outside the door is a floor covered with brown and white tiles with geometric patterns. On the right side of the screen, there is an old slipper on the tiles outside the door. The floor inside the door is covered with white tiles. On the right side of the screen, there is a piece of furniture covered with a dark gray cloth with yellow lines.", "visual_elements": [ { "content": "The camera is pointed at a half-open brown wooden door.", "type": "camera", "weight": 3 }, { "content": "Outside the door is a floor covered with brown and white tiles with geometric patterns.", "type": "scene", "weight": 2 }, { "content": "On the right side of the screen, there is an old slipper on the tiles outside the door.", "type": "scene", "weight": 1 }, { "content": "The floor inside the door is covered with white tiles.", "type": "scene", "weight": 2 }, { "content": "On the right side of the screen, there is a piece of furniture covered with a dark gray cloth with yellow lines.", "type": "scene", "weight": 1 } ] }, { "event": "The camera keeps a low angle. A person's leg appears from the right side of the screen. The person's legs are bare, and the toes are painted with purple nail polish. The person bends down and places a white piece of paper at the door. Then the person slowly closes the door, and the paper is squeezed onto the tiles outside the door. The person picks up the paper and disappears to the right side of the screen.", "visual_elements": [ { "content": "The camera keeps a low angle.", "type": "camera", "weight": 3 }, { "content": "A person's leg appears from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The person's legs are bare, and toes are painted with purple nail polish.", "type": "attribute", "weight": 1 }, { "content": "The person bends down and places a white piece of paper at the door.", "type": "action", "weight": 3 }, { "content": "Then the person slowly closes the door, and the piece of paper is squeezed onto the tiles outside the door.", "type": "action", "weight": 3 }, { "content": "The person picks up the piece of paper and disappears from the right side of the screen.", "type": "action", "weight": 2 } ] }, { "event": "Then, the person's legs appear from the right side of the screen again. The person bends down again and places a white plastic bag at the door. Then the person closes the door again, and the plastic bag is squeezed out of the door. Then, the person picks up the plastic bag and legs disappear from the right side of the screen. Then, the person's legs appear again. The person bends down and places a blue towel at the door. Then the person closes the door again, and the towel is squeezed out of the door. 
Finally, the person leaves the screen again.", "visual_elements": [ { "content": "Then, the person's legs appear from the right side of the screen again.", "type": "action", "weight": 3 }, { "content": "The person bends down again and places a white plastic bag at the door.", "type": "action", "weight": 3 }, { "content": "Then the person closes the door again, and the plastic bag is squeezed out of the door.", "type": "action", "weight": 3 }, { "content": "Then, the person picks up the plastic bag and legs disappear from the right side of the screen.", "type": "action", "weight": 2 }, { "content": "Then, the person's legs appear again.", "type": "action", "weight": 3 }, { "content": "The person bends down and places a blue towel at the door.", "type": "action", "weight": 3 }, { "content": "Then the person closes the door again, and the towel is squeezed out of the door.", "type": "action", "weight": 3 }, { "content": "Finally, the person leaves the screen again.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0473", "video_path": "PerceptionTest/video_2005.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 22.39, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a pair of feet wearing two different shoes. The right foot is wearing a blue canvas shoe with a white logo, and the left foot is wearing a black sneaker. The shoelaces of the two shoes are scattered on the floor. The owner of the feet is wearing a pair of black pants with the legs rolled up to reveal the ankles. The background is an indoor environment with a wooden floor. There are some white papers scattered on the floor next to the white wall on the right side of the screen.\nThe camera keeps shooting at a low angle. A pair of hands appears at the top of the screen. The owner of the hands is wearing a gray long-sleeved shirt. The hands reach for the black sneakers on the left foot and start to tie the shoelaces. After tying the left shoelaces, the hands start to tie the shoelaces on the right foot. After tying the shoelaces, the person's right hand reaches to the upper left of the screen to turn off the camera.", "events": [ { "event": "At the beginning of the video, the camera is focused on a pair of feet wearing two different shoes. The right foot is wearing a blue canvas shoe with a white logo, and the left foot is wearing a black sneaker. The shoelaces of the two shoes are scattered on the floor. The owner of the feet is wearing a pair of black pants with the legs rolled up to reveal the ankles. The background is an indoor environment with a wooden floor. 
There are some white papers scattered on the floor next to the white wall on the right side of the screen.", "visual_elements": [ { "content": "The camera is focused on a pair of feet wearing two different shoes.", "type": "camera", "weight": 3 }, { "content": "The right foot is wearing a blue canvas shoe with a white icon.", "type": "attribute", "weight": 3 }, { "content": "The left foot is wearing a black sneaker.", "type": "attribute", "weight": 3 }, { "content": "The laces of the two shoes are scattered on the floor.", "type": "attribute", "weight": 2 }, { "content": "The owner of the feet is wearing a pair of black pants with the legs rolled up to reveal the ankles.", "type": "attribute", "weight": 1 }, { "content": "The background is an indoor environment with a wooden floor.", "type": "scene", "weight": 3 }, { "content": "There are some white papers scattered on the floor next to the white wall on the right side of the picture.", "type": "scene", "weight": 2 } ] }, { "event": "The camera keeps shooting at a low angle. A pair of hands appears at the top of the screen. The owner of the hands is wearing a gray long-sleeved shirt. The hands reach for the black sneakers on the left foot and start to tie the shoelaces. After tying the left shoelaces, the hands start to tie the shoelaces on the right foot. After tying the shoelaces, the person's right hand reaches to the upper left of the screen to turn off the camera.", "visual_elements": [ { "content": "The camera keeps shooting at a low angle.", "type": "camera", "weight": 3 }, { "content": "A pair of hands appears at the top of the screen.", "type": "attribute", "weight": 3 }, { "content": "The owner of the hands is wearing a gray long-sleeved shirt.", "type": "attribute", "weight": 1 }, { "content": "The hands begin to tie the laces of the black sneakers on the left foot.", "type": "action", "weight": 3 }, { "content": "After tying the laces on the left foot, the hands begin to tie the laces on the right foot.", "type": "action", "weight": 3 }, { "content": "After tying the laces, the person's right hand reaches to the upper left of the screen and turns off the camera.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0474", "video_path": "PerceptionTest/video_2052.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 35.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with a camera looking down at a kitchen counter. On the left side of the screen is a white electric stove. On the lower left of the screen is a stainless steel kettle. To the left of the kettle is a colorful ceramic spoon holder with a floral pattern. On the upper left of the screen are two slices of bread with an apple sandwiched between them on a blue stand on the stove. On the right side of the screen is a khaki kitchen counter. From top to bottom, on the kitchen counter are a glass jar with a green lid filled with pink sweetener packets, a glass jar with an orange seal containing tea bags, red and blue ceramic spice jars, a white cup, and a metal spoon. On the kitchen counter at the top of the screen, there is a row of spice bottles.\nNext, a hand enters the picture from the right side of the frame. The owner of this hand is wearing a black shirt with a pattern. 
The right hand first places the orange glass jar on the kitchen counter to the right of the green-lid glass jar, and then places the red and blue ceramic spice jars to the right of the orange glass jar in a row. Then, the hand takes out the two sealed tea bags from the glass jar with an orange seal. Then, both hands tear off a tea bag and put it into the white cup. Then, the person's right hand picks up the kettle, opens the lid of the kettle, and pours water into the cup. Then, the person's left hand picks up a metal spoon and stirs the tea bag in the cup. After stirring a few times, the hand puts down the spoon and takes a torn pink sweetener packet from the green-lid glass jar and pours it into the cup. Finally, the left hand puts the sweetener packet back on the green lid of the glass jar and stirs it again with the spoon.", "events": [ { "event": "The video begins with a camera looking down at a kitchen counter. On the left side of the screen is a white electric stove. On the lower left of the screen is a stainless steel kettle. To the left of the kettle is a colorful ceramic spoon holder with a floral pattern. On the upper left of the screen are two slices of bread with an apple sandwiched between them on a blue stand on the stove. On the right side of the screen is a khaki kitchen counter. From top to bottom, on the kitchen counter are a glass jar with a green lid filled with pink sweetener packets, a glass jar with an orange seal containing tea bags, red and blue ceramic spice jars, a white cup, and a metal spoon. On the kitchen counter at the top of the screen, there is a row of spice bottles.", "visual_elements": [ { "content": "The camera is looking down at the kitchen counter.", "type": "camera", "weight": 3 }, { "content": "On the left side of the screen is a white electric stove.", "type": "attribute", "weight": 2 }, { "content": "On the lower left of the screen is a stainless steel kettle.", "type": "attribute", "weight": 2 }, { "content": "To the left of the kettle is a colorful ceramic spoon holder with a floral pattern.", "type": "attribute", "weight": 2 }, { "content": "On the upper left of the screen is a blue stand on the stove with two slices of bread and an apple between them.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the screen is a khaki kitchen counter.", "type": "attribute", "weight": 2 }, { "content": "On the kitchen counter, from top to bottom, there is a glass jar with a green lid filled with pink sweetener packets, a glass jar with an orange seal and tea bags, red and blue ceramic spice jars, a white cup and a metal spoon.", "type": "attribute", "weight": 2 }, { "content": "On the kitchen counter at the top of the screen, there is a row of spice bottles.", "type": "scene", "weight": 1 } ] }, { "event": "Next, a hand enters the picture from the right side of the frame. The owner of this hand is wearing a black shirt with a pattern. The right hand first places the orange glass jar on the kitchen counter to the right of the green-lid glass jar, and then places the red and blue ceramic spice jars to the right of the orange glass jar in a row. Then, the hand takes out the two sealed tea bags from the glass jar with an orange seal. Then, both hands tear off a tea bag and put it into the white cup. Then, the person's right hand picks up the kettle, opens the lid of the kettle, and pours water into the cup. Then, the person's left hand picks up a metal spoon and stirs the tea bag in the cup. 
After stirring a few times, the hand puts down the spoon and takes a torn pink sweetener packet from the green-lid glass jar and pours it into the cup. Finally, the left hand puts the sweetener packet back on the green lid of the glass jar and stirs it again with the spoon.", "visual_elements": [ { "content": "Next, a person's right hand enters the picture from the right side of the picture.", "type": "attribute", "weight": 3 }, { "content": "The owner of the right hand is wearing a black patterned top.", "type": "attribute", "weight": 1 }, { "content": "The right hand first places the orange glass jar on the kitchen counter to the right of the green-lid glass jar.", "type": "action", "weight": 3 }, { "content": "Then, the red and blue ceramic spice jars are placed one by one to the right of the orange glass jar, in a row.", "type": "action", "weight": 3 }, { "content": "Then, the right hand takes out the two sealed tea bags from the glass jar with the orange seal.", "type": "action", "weight": 2 }, { "content": "Then, both hands tear off a tea bag and put the tea bag into the white cup.", "type": "action", "weight": 3 }, { "content": "Then, the left hand opens the lid of the kettle and pours water into the cup.", "type": "action", "weight": 3 }, { "content": "Then, the right hand picks up a metal spoon and stirs the tea bag in the cup.", "type": "action", "weight": 3 }, { "content": "After stirring a few times, the right hand puts down the spoon.", "type": "action", "weight": 1 }, { "content": "The right hand takes a torn pink sweetener bag from the green lid of the glass jar.", "type": "action", "weight": 3 }, { "content": "The right hand pours the white sugar in the bag into the cup.", "type": "action", "weight": 2 }, { "content": "Finally, the right hand puts the bag back on the green lid of the glass jar and stirs it again with the spoon.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 20 }, { "index": "TUNA_0475", "video_path": "PerceptionTest/video_2062.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 21.1, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera pointed at a black marble table. On the left side of the table is a black high-top canvas shoe. On the canvas shoe is a remote control and a white charging cable. On the table are two white paper bags, folded banknotes and a black wallet. On the right side of the table is a yellow backpack with brown ears and a face. A woman is sitting behind the table. The woman is plump and wears a blue and white striped sleeveless top with red letters. The woman has her hands on the table. The background is a yellow wall. On the left side of the screen is the corner of a white door.\nThe camera keeps shaking. The woman opens her black wallet and puts the banknotes, paper bag and a small object into the wallet one by one. Then, the woman closes the wallet, unzips the yellow backpack and puts the wallet into the backpack. Finally, the woman zips up the backpack and puts her hands back on the table.", "events": [ { "event": "The video begins with the camera pointed at a black marble table. On the left side of the table is a black high-top canvas shoe. On the canvas shoe is a remote control and a white charging cable. On the table are two white paper bags, folded banknotes and a black wallet. On the right side of the table is a yellow backpack with brown ears and a face. A woman is sitting behind the table. 
The woman is plump and wears a blue and white striped sleeveless top with red letters. The woman has her hands on the table. The background is a yellow wall. On the left side of the screen is the corner of a white door.", "visual_elements": [ { "content": "The camera is pointed at a black marble table.", "type": "camera", "weight": 3 }, { "content": "A black high-top canvas shoe is placed on the left side of the table.", "type": "attribute", "weight": 2 }, { "content": "A remote control and a white charging cable are placed on the canvas shoe.", "type": "attribute", "weight": 2 }, { "content": "There are two white paper bags, folded banknotes and a black wallet on the table.", "type": "attribute", "weight": 2 }, { "content": "There is a yellow backpack on the right side of the table.", "type": "attribute", "weight": 2 }, { "content": "A woman is sitting behind the table.", "type": "attribute", "weight": 3 }, { "content": "The woman is plump and wears a blue and white striped sleeveless top with red letters.", "type": "attribute", "weight": 2 }, { "content": "The woman has her hands on the table.", "type": "action", "weight": 3 }, { "content": "The background is a yellow wall.", "type": "scene", "weight": 2 }, { "content": "The left side of the picture is a corner of a white door.", "type": "scene", "weight": 1 } ] }, { "event": "The camera keeps shaking. The woman opens her black wallet and puts the banknotes, paper bag and a small object into the wallet one by one. Then, the woman closes the wallet, unzips the yellow backpack and puts the wallet into the backpack. Finally, the woman zips up the backpack and puts her hands back on the table.", "visual_elements": [ { "content": "The camera keeps shaking.", "type": "camera", "weight": 3 }, { "content": "The woman opens the black wallet and puts the banknotes, paper bag and a small object into the wallet in turn.", "type": "action", "weight": 3 }, { "content": "Then, the woman closes the wallet.", "type": "action", "weight": 2 }, { "content": "Then, the woman unzips the yellow backpack and puts the wallet into the backpack.", "type": "action", "weight": 3 }, { "content": "Finally, the woman zips up the backpack and puts her hands back on the table.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0476", "video_path": "PerceptionTest/video_214.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 25.29, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a black man in a white T-shirt. A white USB cable hangs around the man's neck. The man stands in front of a table. On the table are a piece of bread, two red apples, a pair of white pants and some documents. The background is a room with yellow walls. There are some papers on the wall and a black door on the right side of the picture.\nThe man adjusts the data cable with both hands, and then lays the pants flat on the table. Then, the man holds down the pants with his left hand, and takes out a black iron with an orange bottom and top from the left side of the screen with his right hand. From the waist to the trouser legs, the man irons the clothes skillfully. At the end of the video, the man puts the iron back and stretches his right hand to the front left of the screen to turn off the camera.", "events": [ { "event": "The video begins with the camera focusing on a black man in a white T-shirt. 
A white USB cable hangs around the man's neck. The man stands in front of a table. On the table are a piece of bread, two red apples, a pair of white pants and some documents. The background is a room with yellow walls. There are some papers on the wall and a black door on the right side of the picture.", "visual_elements": [ { "content": "The camera is focused on a black man wearing a white T-shirt.", "type": "camera", "weight": 3 }, { "content": "A white USB cable hangs around the man's neck.", "type": "attribute", "weight": 1 }, { "content": "The man stands in front of a table.", "type": "action", "weight": 3 }, { "content": "On the table are a piece of bread, two red apples, a pair of light-colored pants and some documents.", "type": "attribute", "weight": 3 }, { "content": "The background is a room with yellow walls.", "type": "scene", "weight": 3 }, { "content": "There are some papers on the wall.", "type": "scene", "weight": 1 }, { "content": "There is a black door on the right side of the picture.", "type": "scene", "weight": 2 } ] }, { "event": "The man adjusts the data cable with both hands, and then lays the pants flat on the table. Then, the man holds down the pants with his left hand, and takes out a black iron with an orange bottom and top from the left side of the screen with his right hand. From the waist to the trouser legs, the man irons the clothes skillfully. At the end of the video, the man puts the iron back and stretches his right hand to the front left of the screen to turn off the camera.", "visual_elements": [ { "content": "The man adjusts the data cable with both hands, and then lays the pants flat on the table.", "type": "action", "weight": 3 }, { "content": "Then, the man holds down the pants with his left hand, and takes out a black iron with an orange bottom and top from the left side of the screen with his right hand.", "type": "action", "weight": 3 }, { "content": "From the waist to the trouser legs, the man irons the clothes skillfully.", "type": "action", "weight": 3 }, { "content": "At the end of the video, the man puts the iron back and stretches his right hand to the front left of the screen to turn off the camera.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0477", "video_path": "PerceptionTest/video_216.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 33.67, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a marble table covered with a white lace tablecloth. From the left to the right of the screen, there is a plastic bottle of cooking oil with an orange bottle cap, a metal spoon, a white cup with a cartoon pattern, a small black bowl filled with white powder, several letter blocks of different colors, an egg, a stainless steel electric kettle and an orange paper box. On the left side of the screen is a white wardrobe. On the right side of the screen is a black shelf with transparent plastic boxes.\nThe camera keeps shaking. A young Asian man wearing a black T-shirt with yellow letters is sitting at the table, on the left side of the screen. The man is facing the camera sideways with his arm on the table. Then, the man extends his right hand, takes out a tea bag from the orange box, and then puts the tea bag into the cup with a cartoon pattern. Then, the man picks up the electric kettle and pours hot water into the cup. 
After the hot water is poured into the cup, the man puts down the kettle. Then, the man scoops some white powder from the black bowl with a spoon and adds it to the tea and stirs it. Then the man puts down the spoon and puts a pink letter \"Z\" in the middle of the letter block with his right hand. Then, the man continues to stir the water in the cup, and then puts the letter \"Z\" behind the small black bowl. Finally, the man crosses his hands on the table.", "events": [ { "event": "At the beginning of the video, the camera is focused on a marble table covered with a white lace tablecloth. From the left to the right of the screen, there is a plastic bottle of cooking oil with an orange bottle cap, a metal spoon, a white cup with a cartoon pattern, a small black bowl filled with white powder, several letter blocks of different colors, an egg, a stainless steel electric kettle and an orange paper box. On the left side of the screen is a white wardrobe. On the right side of the screen is a black shelf with transparent plastic boxes.", "visual_elements": [ { "content": "The camera is pointed at a marble table covered with a white lace tablecloth.", "type": "camera", "weight": 3 }, { "content": "On the table, from left to right, there is a plastic bottle of cooking oil with an orange cap, a metal spoon, a white cup with a cartoon pattern, a small black bowl with white powder, several letter blocks of different colors, an egg, a stainless steel electric kettle and an orange paper box.", "type": "attribute", "weight": 3 }, { "content": "In the background, there is a white wardrobe on the left side of the picture.", "type": "scene", "weight": 1 }, { "content": "There is a black shelf with transparent plastic boxes on the right side of the picture.", "type": "scene", "weight": 1 } ] }, { "event": "The camera keeps shaking. A young Asian man wearing a black T-shirt with yellow letters is sitting at the table, on the left side of the screen. The man is facing the camera sideways with his arm on the table. Then, the man extends his right hand, takes out a tea bag from the orange box, and then puts the tea bag into the cup with a cartoon pattern. Then, the man picks up the electric kettle and pours hot water into the cup. After the hot water is poured into the cup, the man puts down the kettle. Then, the man scoops some white powder from the black bowl with a spoon and adds it to the tea and stirs it. Then the man puts down the spoon and puts a pink letter \"Z\" in the middle of the letter block with his right hand. Then, the man continues to stir the water in the cup, and then puts the letter \"Z\" behind the small black bowl. 
Finally, the man crosses his hands on the table.", "visual_elements": [ { "content": "The camera keeps shaking.", "type": "camera", "weight": 3 }, { "content": "A young Asian man wearing a black T-shirt with yellow letters is sitting at the table, on the left side of the picture.", "type": "action", "weight": 3 }, { "content": "The man faces the camera sideways with his arm on the table.", "type": "action", "weight": 2 }, { "content": "Then, the man extends his right hand and takes out a tea bag from the orange box.", "type": "action", "weight": 3 }, { "content": "Then the man puts the tea bag into the cup with a cartoon pattern.", "type": "action", "weight": 3 }, { "content": "Then, the man picks up the electric kettle and pours hot water into the cup.", "type": "action", "weight": 3 }, { "content": "Then, the man scoops some of the white powder from the black bowl and stirs it into the tea.", "type": "action", "weight": 3 }, { "content": "Then the man puts down the spoon and puts a pink letter \"Z\" in the middle of the letter block with his right hand.", "type": "action", "weight": 3 }, { "content": "Then, the man continues to stir the water in the cup and puts the letter \"Z\" behind the small black bowl.", "type": "action", "weight": 3 }, { "content": "Finally, the man crosses his hands on the table.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0478", "video_path": "PerceptionTest/video_2179.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 17.69, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with a black woman wearing a black short-sleeved T-shirt with white letters. The woman sits behind a light grey table with her arms crossed. From left to right, the table is filled with a cream paper bag, a khaki round object, a black pen, a black wallet, a white piece of paper, a few coins and some colorful letter toys. The background is a white wall.\nThe woman first folds the white paper. Then the woman puts the paper into a black wallet. Next, the woman picks up a few coins from the table and puts them into the wallet. Then, the woman zips up the wallet and puts it into the cream-colored paper bag on the left side of the screen. At the end of the video, the woman crosses her hands on the table.", "events": [ { "event": "The video begins with a black woman wearing a black short-sleeved T-shirt with white letters. The woman sits behind a light grey table with her arms crossed. From left to right, the table is filled with a cream paper bag, a khaki round object, a black pen, a black wallet, a white piece of paper, a few coins and some colorful letter toys. The background is a white wall.", "visual_elements": [ { "content": "There is a black woman in the video wearing a black short-sleeved T-shirt with white letters.", "type": "attribute", "weight": 3 }, { "content": "The woman sits behind a light gray table with her arms crossed.", "type": "action", "weight": 3 }, { "content": "On the table from left to right are a cream paper bag, a khaki round object, a black pen, a black wallet, a white piece of paper, a few coins and some colorful letter toys.", "type": "attribute", "weight": 3 }, { "content": "The background is a white wall.", "type": "scene", "weight": 3 } ] }, { "event": "The woman first folds the white paper. Then the woman puts the paper into a black wallet. Next, the woman picks up a few coins from the table and puts them into the wallet. 
Then, the woman zips up the wallet and puts it into the cream-colored paper bag on the left side of the screen. At the end of the video, the woman crosses her hands on the table.", "visual_elements": [ { "content": "The woman folds the white paper first.", "type": "action", "weight": 3 }, { "content": "Then the woman puts the paper into the black wallet.", "type": "action", "weight": 3 }, { "content": "Next, the woman puts a few coins into the wallet.", "type": "action", "weight": 3 }, { "content": "Then, the woman zips up the wallet.", "type": "action", "weight": 3 }, { "content": "Then, the woman puts the wallet into the cream paper bag on the left side of the screen.", "type": "action", "weight": 3 }, { "content": "At the end of the video, the woman crosses her hands on the table.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0479", "video_path": "PerceptionTest/video_2189.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 35.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is pointed at a table with various items on it. The items on the table are messy, including a glass with two eggs, a bottle of orange liquid, a blue kettle, a tomato, a bottle of milk, some wires and plugs, and some sundries. A man wearing a gray top and black and yellow plaid pants is bent over and standing behind the table on the left side of the screen. There is a green door on the right side of the screen in the background. There is a black table on the left side of the door, on which is placed a black electric kettle and some kitchen utensils and food.\nThe man straightens up, showing the items in front of the camera with his right hand and holding the items to be shown in his left hand. He shows a black hat with a red pattern, a black charging cable, a black bag, and a blue and white book in front of the camera, and places them one by one on the table in front of him.\nAt the end of the video, the man bends down and reaches for the camera with both hands, then takes it off. The camera moves to the right and keeps shaking left and right.", "events": [ { "event": "At the beginning of the video, the camera is pointed at a table with various items on it. The items on the table are messy, including a glass with two eggs, a bottle of orange liquid, a blue kettle, a tomato, a bottle of milk, some wires and plugs, and some sundries. A man wearing a gray top and black and yellow plaid pants is bent over and standing behind the table on the left side of the screen. There is a green door on the right side of the screen in the background. 
There is a black table on the left side of the door, on which is placed a black electric kettle and some kitchen utensils and food.", "visual_elements": [ { "content": "The camera is pointed at a table with various items on it.", "type": "camera", "weight": 3 }, { "content": "The items on the table are messy, including a glass with two eggs, a bottle of orange liquid, a blue kettle, a tomato, a bottle of milk, some wires and plugs, and some sundries.", "type": "attribute", "weight": 3 }, { "content": "On the left side of the screen is a man wearing a gray top and black and yellow plaid pants.", "type": "attribute", "weight": 3 }, { "content": "The man is bent over and standing behind the table.", "type": "action", "weight": 3 }, { "content": "There is a green door on the right side of the screen in the background.", "type": "scene", "weight": 1 }, { "content": "There is a black table on the left side of the door.", "type": "scene", "weight": 1 }, { "content": "On the table is a black electric kettle and some kitchen utensils and food.", "type": "scene", "weight": 1 } ] }, { "event": "The man straightens up, showing the items in front of the camera with his right hand and holding the items to be shown in his left hand. He shows a black hat with a red pattern, a black charging cable, a black bag, and a blue and white book in front of the camera, and places them one by one on the table in front of him.", "visual_elements": [ { "content": "The man straightens his back, showing the items in front of the camera with his right hand, and holding the items to be shown in his left hand.", "type": "action", "weight": 3 }, { "content": "The man shows a black hat with a red pattern, a black charging cable, a black bag, and a blue and white book in front of the camera.", "type": "action", "weight": 3 }, { "content": "Then, the man places the items he has shown one by one on the table in front of him.", "type": "action", "weight": 2 } ] }, { "event": "At the end of the video, the man bends down and reaches for the camera with both hands, then takes it off. The camera moves to the right and keeps shaking left and right.", "visual_elements": [ { "content": "At the end of the video, the man bends down and reaches for the camera with both hands, and takes it off.", "type": "action", "weight": 3 }, { "content": "The camera moves to the right and keeps shaking left and right.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0480", "video_path": "PerceptionTest/video_221.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 10.85, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is looking down at a table. On the right side of the screen, there is a pink backpack and a purple backpack with blue denim straps on the table. The backs of the two backpacks are facing the camera. On the left side of the screen, there is a black pencil case with a purple zipper and pattern on the table. Under the pencil case is another red pencil case. On the left side of the screen, there is also a book with pink flowers and cartoon characters on the cover, as well as a row of small toys. In the background, the yellow checkered tablecloth on the table is torn, revealing the gray tabletop.\nThe camera keeps shaking. A pair of hands appears from the top of the screen. The owner of the hands is wearing a purple long-sleeved shirt. 
The hands put the book with pink flowers and cartoon characters on the cover into the purple backpack. Then, the hands put another book with a racing car pattern on the cover under the cartoon book into the pink backpack. Then, the hands put the black pencil case into the purple backpack. Then, the hands put the red pencil case into the pink backpack. At the end of the video, the hands leave the screen.", "events": [ { "event": "At the beginning of the video, the camera is looking down at a table. On the right side of the screen, there is a pink backpack and a purple backpack with blue denim straps on the table. The backs of the two backpacks are facing the camera. On the left side of the screen, there is a black pencil case with a purple zipper and pattern on the table. Under the pencil case is another red pencil case. On the left side of the screen, there is also a book with pink flowers and cartoon characters on the cover, as well as a row of small toys. In the background, the yellow checkered tablecloth on the table is torn, revealing the gray tabletop.", "visual_elements": [ { "content": "The camera is shooting a table from above.", "type": "camera", "weight": 3 }, { "content": "On the right side of the screen, there is a pink backpack and a purple backpack on the table.", "type": "attribute", "weight": 3 }, { "content": "The backs of the two backpacks are facing the camera.", "type": "attribute", "weight": 3 }, { "content": "On the left side of the screen, there is a black pencil case with a purple zipper and pattern on the table.", "type": "attribute", "weight": 3 }, { "content": "There is another red pencil case under the black pencil case.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the screen, there is also a book with pink flowers and cartoon images on the cover and a row of small toys.", "type": "attribute", "weight": 3 }, { "content": "In the background, the yellow checkered tablecloth on the table is torn, revealing the gray tabletop.", "type": "scene", "weight": 1 } ] }, { "event": "The camera keeps shaking. A pair of hands appears from the top of the screen. The owner of the hands is wearing a purple long-sleeved shirt. The hands put the book with pink flowers and cartoon characters on the cover into the purple backpack. Then, the hands put another book with a racing car pattern on the cover under the cartoon book into the pink backpack. Then, the hands put the black pencil case into the purple backpack. Then, the hands put the red pencil case into the pink backpack. 
At the end of the video, the hands leave the screen.", "visual_elements": [ { "content": "The camera keeps shaking.", "type": "camera", "weight": 3 }, { "content": "A pair of hands appears from the top of the screen.", "type": "attribute", "weight": 3 }, { "content": "The owner of the hands is wearing a purple long-sleeved shirt.", "type": "attribute", "weight": 1 }, { "content": "The hands put the book with pink flowers and cartoon images on the cover into the purple backpack.", "type": "action", "weight": 3 }, { "content": "Then, the hands put another book with a racing car pattern on the cover under the cartoon book into the pink backpack.", "type": "action", "weight": 3 }, { "content": "Then, the hands put the black pencil case into the purple backpack.", "type": "action", "weight": 3 }, { "content": "Then, the hands put the red pencil case into the pink backpack.", "type": "action", "weight": 3 }, { "content": "At the end of the video, the hands leave the screen.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0481", "video_path": "PerceptionTest/video_2251.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 16.53, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a wooden table. On the tabletop, there is a white piece of paper, a yellow colored pen, and a pinkish-white thermos cup. The background is a light pink wall with two electrical outlets. There is a power cord plugged into the outlet on the right, and the rather long power cord extends to the edge of the table.\nThe camera shakes from side to side. Then a hand enters the frame from the right, picks up the yellow pen and gestures in the air above the white paper. Then the right hand takes away this yellow colored pen. Next, both hands pick up the white paper on the table and bring it close to the camera. Then both hands put the white paper back onto the table. After that, both hands take out two colored alphabet magnets from below the frame. The left hand holds a blue \"N\" and places it under the white paper, while the right hand holds a red \"y\" and places it under the white paper. The camera moves slightly downward and the picture gets blurry.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wooden table. On the tabletop, there is a white piece of paper, a yellow colored pen, and a pinkish-white thermos cup. The background is a light pink wall with two electrical outlets. There is a power cord plugged into the outlet on the right, and the rather long power cord extends to the edge of the table.", "visual_elements": [ { "content": "The camera is focused on a wooden table.", "type": "camera", "weight": 3 }, { "content": "On the tabletop, there is a white piece of paper, a yellow colored pen, and a pinkish-white thermos cup.", "type": "attribute", "weight": 3 }, { "content": "The background is a light pink wall.", "type": "scene", "weight": 2 }, { "content": "On the wall are two electrical outlets.", "type": "attribute", "weight": 2 }, { "content": "There is a power cord plugged into the outlet on the right.", "type": "attribute", "weight": 2 }, { "content": "The rather long power cord extends to the edge of the table.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera shakes from side to side. 
Then a hand enters the frame from the right, picks up the yellow pen and gestures in the air above the white paper. Then the right hand takes away this yellow colored pen. Next, both hands pick up the white paper on the table and bring it close to the camera. Then both hands put the white paper back onto the table. After that, both hands take out two colored alphabet magnets from below the frame. The left hand holds a blue \"N\" and places it under the white paper, while the right hand holds a red \"y\" and places it under the white paper. The camera moves slightly downward and the picture gets blurry.", "visual_elements": [ { "content": "The camera shakes from side to side.", "type": "camera", "weight": 3 }, { "content": "Then a hand enters the frame from the right.", "type": "action", "weight": 2 }, { "content": "The hand picks up the yellow pen and gestures in the air above the white paper.", "type": "action", "weight": 3 }, { "content": "Then the right hand takes away this yellow colored pen.", "type": "action", "weight": 2 }, { "content": "Next, both hands pick up the white paper on the table and bring it close to the camera.", "type": "action", "weight": 3 }, { "content": "Then both hands put the white paper back onto the table.", "type": "action", "weight": 3 }, { "content": "After that, both hands take out two colored alphabet magnets from below the frame.", "type": "action", "weight": 3 }, { "content": "The left hand holds a blue \"N\" and places it under the white paper.", "type": "action", "weight": 3 }, { "content": "The right hand holds a red \"y\" and places it under the white paper.", "type": "action", "weight": 3 }, { "content": "The camera moves slightly downward.", "type": "camera", "weight": 3 }, { "content": "The picture gets blurry.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0482", "video_path": "PerceptionTest/video_2253.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 11.32, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a person wearing a white sportswear is standing in front of a wooden table. On the table are placed two upside-down transparent glass cups, a red and white cardboard box of a mixer, a black flowerpot with a green plant in it, and a brush with blue and yellow handles. The brown strap of the brush is wrapped around the roots of the green plant. The background is a light yellow wall.\nThis person initially places both hands on the table. Then, this person picks up the glass cup on the left in the frame with the right hand, taps a round coin under the cup with the index finger of the left hand, and then puts the cup back in its original position. Subsequently, this person picks up the glass cup on the right with the right hand to show that there is no coin under it, and then also puts the glass cup back in its original position.\nNext, this person presses the bottom of the glass cup on the right side of the frame with the right hand and presses the bottom of the glass cup on the left side of the frame with the left hand, and starts to alternate the positions of the two cups. Finally, the glass cup with the coin is on the right side of the frame, and the glass cup without the coin is on the left side of the frame. The camera shakes throughout the whole process.", "events": [ { "event": "At the beginning of the video, a person wearing a white sportswear is standing in front of a wooden table. 
On the table are placed two upside-down transparent glass cups, a red and white cardboard box of a mixer, a black flowerpot with a green plant in it, and a brush with blue and yellow handles. The brown strap of the brush is wrapped around the roots of the green plant. The background is a light yellow wall.", "visual_elements": [ { "content": "A person wearing a white sportswear is standing in front of a wooden table.", "type": "attribute", "weight": 3 }, { "content": "On the table are placed two upside-down transparent glass cups, a red and white cardboard box of a mixer, a black flowerpot with a green plant in it, and a brush with blue and yellow handles.", "type": "attribute", "weight": 3 }, { "content": "The brown strap of the brush is wrapped around the roots of the green plant.", "type": "attribute", "weight": 2 }, { "content": "The background is a light yellow wall.", "type": "scene", "weight": 1 } ] }, { "event": "This person initially places both hands on the table. Then, this person picks up the glass cup on the left in the frame with the right hand, taps a round coin under the cup with the index finger of the left hand, and then puts the cup back in its original position. Subsequently, this person picks up the glass cup on the right with the right hand to show that there is no coin under it, and then also puts the glass cup back in its original position.", "visual_elements": [ { "content": "This person initially places both hands on the table.", "type": "action", "weight": 2 }, { "content": "This person picks up the glass cup on the left in the frame with the right hand.", "type": "action", "weight": 3 }, { "content": "This person taps a round coin under the cup with the index finger of the left hand.", "type": "action", "weight": 3 }, { "content": "This person puts the cup back in its original position.", "type": "action", "weight": 2 }, { "content": "This person picks up the glass cup on the right with the right hand to show that there is no coin under it.", "type": "action", "weight": 3 }, { "content": "This person also puts the glass cup back in its original position.", "type": "action", "weight": 2 } ] }, { "event": "Next, this person presses the bottom of the glass cup on the right side of the frame with the right hand and presses the bottom of the glass cup on the left side of the frame with the left hand, and starts to alternate the positions of the two cups. Finally, the glass cup with the coin is on the right side of the frame, and the glass cup without the coin is on the left side of the frame. 
The camera shakes throughout the whole process.", "visual_elements": [ { "content": "This person presses the bottom of the glass cup on the right side of the frame with the right hand.", "type": "action", "weight": 3 }, { "content": "This person presses the bottom of the glass cup on the left side of the frame with the left hand, and starts to alternate the positions of the two cups.", "type": "action", "weight": 3 }, { "content": "The glass cup with the coin is on the right side of the frame.", "type": "attribute", "weight": 2 }, { "content": "The glass cup without the coin is on the left side of the frame.", "type": "attribute", "weight": 2 }, { "content": "The camera shakes throughout the whole process.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0483", "video_path": "PerceptionTest/video_2264.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 11.82, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a wooden table with wide brown and yellow stripes. On the tabletop, there are three upside-down metal cups. There is a fork and a pencil on the left side of the table in the frame, a black mobile phone is placed in the middle, and a ceramic cup with colored stripes is on the right side. There is a red bottle cap on the table in the close-up shot of the camera. A child wearing a pink top is sitting on a chair and has both hands on the table.\nThe camera shakes slightly. In the frame, this child starts to pick up and then put down the upside-down metal cups on the table one by one from right to left with both hands. The first two cups are empty, and there is a green ball in the last cup. The camera moves slightly to the right. This child uses both hands to change the positions of these three metal cups and repeats this action. Finally, the three cups are rearranged in a row again, and the left hand of this child intends to open the cup on the far right side of the frame.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wooden table with wide brown and yellow stripes. On the tabletop, there are three upside-down metal cups. There is a fork and a pencil on the left side of the table in the frame, a black mobile phone is placed in the middle, and a ceramic cup with colored stripes is on the right side. There is a red bottle cap on the table in the close-up shot of the camera. A child wearing a pink top is sitting on a chair and has both hands on the table.", "visual_elements": [ { "content": "The camera is focused on a wooden table with wide brown and yellow stripes.", "type": "camera", "weight": 3 }, { "content": "On the tabletop, there are three upside-down metal cups.", "type": "attribute", "weight": 3 }, { "content": "There is a fork and a pencil on the left side of the table in the frame.", "type": "attribute", "weight": 2 }, { "content": "A black mobile phone is placed in the middle of the table.", "type": "attribute", "weight": 2 }, { "content": "A ceramic cup with colored stripes is on the right side of the table.", "type": "attribute", "weight": 2 }, { "content": "There is a red bottle cap on the table in the close-up shot of the camera.", "type": "attribute", "weight": 2 }, { "content": "A child wearing a pink top is sitting on a chair and has both hands on the table.", "type": "action", "weight": 3 } ] }, { "event": "The camera shakes slightly. 
In the frame, this child starts to pick up and then put down the upside-down metal cups on the table one by one from right to left with both hands. The first two cups are empty, and there is a green ball in the last cup. The camera moves slightly to the right. This child uses both hands to change the positions of these three metal cups and repeats this action. Finally, the three cups are rearranged in a row again, and the left hand of this child intends to open the cup on the far right side of the frame.", "visual_elements": [ { "content": "The camera shakes slightly.", "type": "camera", "weight": 3 }, { "content": "In the frame, this child starts to pick up and then put down the upside-down metal cups on the table one by one from right to left with both hands.", "type": "action", "weight": 3 }, { "content": "The first two cups are empty.", "type": "attribute", "weight": 2 }, { "content": "There is a green ball in the last cup.", "type": "attribute", "weight": 2 }, { "content": "The camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "This child uses both hands to change the positions of these three metal cups.", "type": "action", "weight": 3 }, { "content": "This child repeats this action.", "type": "action", "weight": 3 }, { "content": "Finally, the three cups are rearranged in a row again.", "type": "action", "weight": 3 }, { "content": "The left hand of this child intends to open the cup on the far right side of the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0484", "video_path": "PerceptionTest/video_2386.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 29.85, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a white piece of paper and a black marker on a light-colored table. A person wearing blue clothes is sitting beside the table in the frame. This person picks up the marker with the right hand and starts to draw shapes on the white paper. The person uses the left hand to hold the white paper still. Firstly, a hexagon is drawn at the lower left corner of the white paper. Then a circle is drawn above the hexagon. After that, the person slightly lifts the drawn white paper and shows it to the camera.\nAfter finishing drawing the shapes, the hand places the marker on the paper, and then picks up a small cut hexagon piece of paper from the left side. The right hand places this hexagon piece of paper on the just-drawn white paper and adjusts its position to make it completely cover the corresponding part. Then, the left hand picks up a small cut circular piece of paper and also places it on the drawn white paper, adjusting its position in the same way to make it completely cover the drawn circle. Finally, the person moves the original white paper as a whole to the right. On the left side of the white paper is a fork with a white handle inserted into the paper. The camera shakes slightly throughout the whole process.", "events": [ { "event": "At the beginning of the video, the camera is focused on a white piece of paper and a black marker on a light-colored table. A person wearing blue clothes is sitting beside the table in the frame. This person picks up the marker with the right hand and starts to draw shapes on the white paper. The person uses the left hand to hold the white paper still. Firstly, a hexagon is drawn at the lower left corner of the white paper. 
Then a circle is drawn above the hexagon. After that, the person slightly lifts the drawn white paper and shows it to the camera.", "visual_elements": [ { "content": "The camera is focused on a white piece of paper and a black marker on a light-colored table.", "type": "camera", "weight": 3 }, { "content": "A person wearing blue clothes is sitting beside the table in the frame.", "type": "attribute", "weight": 2 }, { "content": "This person picks up the marker with the right hand and starts to draw shapes on the white paper.", "type": "action", "weight": 3 }, { "content": "The person uses the left hand to hold the white paper still.", "type": "action", "weight": 2 }, { "content": "A hexagon is drawn at the lower left corner of the white paper by this person.", "type": "action", "weight": 3 }, { "content": "Then a circle is drawn above the hexagon by this person.", "type": "action", "weight": 3 }, { "content": "The person slightly lifts the drawn white paper and shows it to the camera.", "type": "action", "weight": 3 } ] }, { "event": "After finishing drawing the shapes, the hand places the marker on the paper, and then picks up a small cut hexagon piece of paper from the left side. The right hand places this hexagon piece of paper on the just-drawn white paper and adjusts its position to make it completely cover the corresponding part. Then, the left hand picks up a small cut circular piece of paper and also places it on the drawn white paper, adjusting its position in the same way to make it completely cover the drawn circle. Finally, the person moves the original white paper as a whole to the right. On the left side of the white paper is a fork with a white handle inserted into the paper. The camera shakes slightly throughout the whole process.", "visual_elements": [ { "content": "After finishing drawing the shapes, the hand places the marker on the paper.", "type": "action", "weight": 2 }, { "content": "This person picks up a small cut hexagon piece of paper from the left side.", "type": "action", "weight": 3 }, { "content": "The right hand places this hexagon piece of paper on the just-drawn white paper.", "type": "action", "weight": 3 }, { "content": "The right hand adjusts its position to make it completely cover the corresponding part.", "type": "attribute", "weight": 2 }, { "content": "The left hand picks up a small cut circular piece of paper and also places it on the drawn white paper,", "type": "action", "weight": 3 }, { "content": "The left hand adjusts its position in the same way to make it completely cover the drawn circle.", "type": "attribute", "weight": 2 }, { "content": "The person moves the original white paper as a whole to the right.", "type": "action", "weight": 3 }, { "content": "On the left side of the white paper is a fork with a white handle inserted into the paper.", "type": "attribute", "weight": 3 }, { "content": "The camera shakes slightly throughout the whole process.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0485", "video_path": "PerceptionTest/video_2456.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 24.52, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a wooden table. There are some coins scattered on the tabletop, along with a piece of white paper. Next to the white paper is a banknote, with a lock pressing on it. 
There are also more of the same banknotes standing upright on the table, as well as several masks. The denominations of the banknotes are all 500. A black schoolbag, white strings and a wooden stool can be seen around the table.\nThe camera remains stable. A pair of hands with darker skin tones appear on the right side of the frame. The left hand holds down the white paper on the table, and the right hand takes a green pen and starts to write on the white paper. The right hand writes down the letters \"F\", \"A\" and \"T\" one by one on the paper, forming the word \"FAT\". After writing the letters, both hands lift the white paper up close to the camera to show it, and then put the white paper back in its original place and both hands leave the frame. Then, the hands take out the cut alphabet paper pieces from the right side of the frame. The \"F\" paper piece is yellow, and the \"A\" and \"T\" paper pieces are both white. The right hand places the paper pieces on the white paper with the word \"FAT\" written on it one after another.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wooden table. There are some coins scattered on the tabletop, along with a piece of white paper. Next to the white paper is a banknote, with a lock pressing on it. There are also more of the same banknotes standing upright on the table, as well as several masks. The denominations of the banknotes are all 500. A black schoolbag, white strings and a wooden stool can be seen around the table.", "visual_elements": [ { "content": "The camera is focused on a wooden table.", "type": "camera", "weight": 3 }, { "content": "There are some coins scattered on the tabletop, along with a piece of white paper.", "type": "attribute", "weight": 2 }, { "content": "Next to the white paper is a banknote.", "type": "attribute", "weight": 2 }, { "content": "A lock is pressing on the banknote.", "type": "attribute", "weight": 2 }, { "content": "There are also more of the same banknotes standing upright on the table, as well as several masks.", "type": "attribute", "weight": 2 }, { "content": "The denominations of the banknotes are all 500.", "type": "attribute", "weight": 2 }, { "content": "A black schoolbag, white strings and a wooden stool can be seen around the table.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera remains stable. A pair of hands with darker skin tones appear on the right side of the frame. The left hand holds down the white paper on the table, and the right hand takes a green pen and starts to write on the white paper. The right hand writes down the letters \"F\", \"A\" and \"T\" one by one on the paper, forming the word \"FAT\". After writing the letters, both hands lift the white paper up close to the camera to show it, and then put the white paper back in its original place and both hands leave the frame. Then, the hands take out the cut alphabet paper pieces from the right side of the frame. The \"F\" paper piece is yellow, and the \"A\" and \"T\" paper pieces are both white. 
The right hand places the paper pieces on the white paper with the word \"FAT\" written on it one after another.", "visual_elements": [ { "content": "The camera remains stable.", "type": "camera", "weight": 3 }, { "content": "A pair of hands with darker skin tones appear on the right side of the frame.", "type": "attribute", "weight": 2 }, { "content": "The left hand holds down the white paper on the table.", "type": "action", "weight": 2 }, { "content": "The right hand takes a green pen and starts to write on the white paper.", "type": "action", "weight": 3 }, { "content": "The right hand writes down the letters \"F\", \"A\" and \"T\" one by one on the paper.", "type": "action", "weight": 3 }, { "content": "The letters form the word \"FAT\".", "type": "attribute", "weight": 3 }, { "content": "After writing the letters, both hands lift the white paper up close to the camera to show it.", "type": "action", "weight": 3 }, { "content": "Then both hands put the white paper back in its original place.", "type": "action", "weight": 3 }, { "content": "Both hands leave the frame.", "type": "action", "weight": 2 }, { "content": "The hands take out the cut alphabet paper pieces from the right side of the frame. The \"F\" paper piece is yellow, and the \"A\" and \"T\" paper pieces are both white.", "type": "action", "weight": 3 }, { "content": "The right hand places the paper pieces on the white paper with the word \"FAT\" written on it one after another.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 18 }, { "index": "TUNA_0486", "video_path": "PerceptionTest/video_2536.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 17.47, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, a woman appears in the frame. She is sitting behind a light-colored table, wearing a white sleeveless top with lace decorations. Her hands are placed on the table, and several coffee cups of different colors and sizes are arranged beside her left hand. The background is a wall with mottled marks, and the color of the wall is a mixture of light yellow and brown.\nThe camera keeps shaking. The woman stretches out her right hand to pick up a coffee cup with black English words on it and places it on her right side. Then, she picks up a white coffee cup with colorful stripes with her right hand and places it behind the first coffee cup. After that, she reaches out again to pick up a coffee cup with alternating green and brown colors and places it in front of the first coffee cup. She repeats this action again. The pure white coffee cup is placed behind the white coffee cup with colorful stripes, and the blue-white coffee cup is placed in front of the coffee cup with alternating green and brown colors. All five coffee cups are neatly arranged on the left side of the table.", "events": [ { "event": "At the beginning of the video, a woman appears in the frame. She is sitting behind a light-colored table, wearing a white sleeveless top with lace decorations. Her hands are placed on the table, and several coffee cups of different colors and sizes are arranged beside her left hand. 
The background is a wall with mottled marks, and the color of the wall is a mixture of light yellow and brown.", "visual_elements": [ { "content": "A woman appears in the frame.", "type": "attribute", "weight": 3 }, { "content": "This woman is sitting behind a light-colored table, wearing a white sleeveless top with lace decorations.", "type": "attribute", "weight": 3 }, { "content": "This woman's hands are placed on the table.", "type": "attribute", "weight": 2 }, { "content": "Several coffee cups of different colors and sizes are arranged beside this woman's left hand.", "type": "attribute", "weight": 3 }, { "content": "The background is a wall with mottled marks.", "type": "scene", "weight": 2 }, { "content": "The color of the wall is a mixture of light yellow and brown.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera keeps shaking. The woman stretches out her right hand to pick up a coffee cup with black English words on it and places it on her right side. Then, she picks up a white coffee cup with colorful stripes with her right hand and places it behind the first coffee cup. After that, she reaches out again to pick up a coffee cup with alternating green and brown colors and places it in front of the first coffee cup. She repeats this action again. The pure white coffee cup is placed behind the white coffee cup with colorful stripes, and the blue-white coffee cup is placed in front of the coffee cup with alternating green and brown colors. All five coffee cups are neatly arranged on the left side of the table.", "visual_elements": [ { "content": "The camera keeps shaking.", "type": "camera", "weight": 3 }, { "content": "The woman stretches out her right hand to pick up a coffee cup with black English words on it and places it on her right side.", "type": "action", "weight": 3 }, { "content": "The woman picks up a white coffee cup with colorful stripes with her right hand and places it behind the first coffee cup.", "type": "action", "weight": 3 }, { "content": "After that, the woman reaches out again to pick up a coffee cup with alternating green and brown colors.", "type": "action", "weight": 3 }, { "content": "The right hand places the coffee cup with alternating green and brown colors in front of the first coffee cup.", "type": "action", "weight": 3 }, { "content": "The woman repeats this action again.", "type": "action", "weight": 3 }, { "content": "The pure white coffee cup is placed behind the white coffee cup with colorful stripes.", "type": "action", "weight": 3 }, { "content": "The blue-white coffee cup is placed in front of the coffee cup with alternating green and brown colors.", "type": "action", "weight": 3 }, { "content": "All five coffee cups are neatly arranged on the left side of the table.", "type": "attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0487", "video_path": "PerceptionTest/video_2559.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 35.04, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a brownish-black table. On the table, from far to near, there is a transparent plastic water bucket, a wooden box, a white cup with the words \"TEA LANGUAGE TEA\" written on it, and a blue water bottle. A green orange is placed beside the blue water bottle. The background is a light-colored wall.\nThen, a woman wearing black clothes appears on the left side of the frame. 
At first, she stands beside the table and claps her hands. Then, she picks up the blue water bottle on the table with her right hand and puts it down again. She repeats this action three times. Then, she opens the wooden box containing white granules with her left hand, and takes the spoon in the white cup with her right hand. She scoops three spoonfuls from the box and puts them into the cup. Then, she covers the lid of the wooden box with her left hand, picks up the green orange on the table with her right hand, tosses the orange back and forth with both hands, and then puts the orange back in its original place and leaves the frame. The camera remains stable throughout the whole process.", "events": [ { "event": "At the beginning of the video, the camera is focused on a brownish-black table. On the table, from far to near, there is a transparent plastic water bucket, a wooden box, a white cup with the words \"TEA LANGUAGE TEA\" written on it, and a blue water bottle. A green orange is placed beside the blue water bottle. The background is a light-colored wall.", "visual_elements": [ { "content": "The camera is focused on a brownish-black table.", "type": "camera", "weight": 3 }, { "content": "On the table, from far to close by, there is a transparent plastic water bucket, a wooden box, a white cup, and a blue water bottle.", "type": "attribute", "weight": 2 }, { "content": "On the white cup are written the words \"TEA LANGUAGE TEA\".", "type": "attribute", "weight": 1 }, { "content": "A green orange is placed beside the blue water bottle.", "type": "attribute", "weight": 2 }, { "content": "The background is a light-colored wall.", "type": "scene", "weight": 1 } ] }, { "event": "Then, a woman wearing black clothes appears on the left side of the frame. At first, she stands beside the table and claps her hands. Then, she picks up the blue water bottle on the table with her right hand and puts it down again. She repeats this action three times. Then, she opens the wooden box containing white granules with her left hand, and takes the spoon in the white cup with her right hand. She scoops three spoonfuls from the box and puts them into the cup. Then, she covers the lid of the wooden box with her left hand, picks up the green orange on the table with her right hand, tosses the orange back and forth with both hands, and then puts the orange back in its original place and leaves the frame. 
The camera remains stable throughout the whole process.", "visual_elements": [ { "content": "A woman wearing black clothes appears on the left side of the frame.", "type": "attribute", "weight": 2 }, { "content": "At first, the woman stands beside the table and claps her hands.", "type": "action", "weight": 3 }, { "content": "Then, the woman picks up the blue water bottle on the table with her right hand and puts it down again.", "type": "action", "weight": 3 }, { "content": "The woman repeats this action three times.", "type": "action", "weight": 3 }, { "content": "Then, the woman opens the wooden box containing white granules with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman takes the spoon in the white cup with her right hand.", "type": "action", "weight": 3 }, { "content": "The woman scoops three spoonfuls from the box and puts them into the cup.", "type": "action", "weight": 3 }, { "content": "The woman covers the lid of the wooden box with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman picks up the green orange on the table with her right hand.", "type": "action", "weight": 3 }, { "content": "The woman tosses the orange back and forth with both hands.", "type": "action", "weight": 3 }, { "content": "The woman then puts the orange back in its original place.", "type": "action", "weight": 2 }, { "content": "The woman leaves the frame.", "type": "action", "weight": 2 }, { "content": "The camera remains stable throughout the whole process.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 18 }, { "index": "TUNA_0488", "video_path": "PerceptionTest/video_2632.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 29.67, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a small white round table. There is an electric iron and a pair of pink children's pants on the table. The background shows a brown sofa, with a bowl placed upside down on it and bananas on the bottom of the bowl.\nThe camera remains stable. A person wearing blue knee-length shorts enters the frame from the left side. This person sits on the sofa facing the table. Then, the person reaches out to unfold the folded pink children's pants on the table and spread them flat on the table. After that, the person holds the pink pants with the left hand and picks up the electric iron with the right hand to start ironing the pants. The iron moves back and forth on the pants. Next, the person turns the pants over and irons them again. Finally, the person puts the iron back on the table and leaves the frame, with the pants still spread out on the table.", "events": [ { "event": "At the beginning of the video, the camera is focused on a small white round table. There is an electric iron and a pair of pink children's pants on the table. 
The background shows a brown sofa, with a bowl placed upside down on it and bananas on the bottom of the bowl.", "visual_elements": [ { "content": "The camera is focused on a small white round table.", "type": "camera", "weight": 3 }, { "content": "There is an electric iron and a pair of pink children's pants on the table.", "type": "attribute", "weight": 3 }, { "content": "The background shows a brown sofa.", "type": "scene", "weight": 2 }, { "content": "A bowl is placed upside down on the sofa.", "type": "attribute", "weight": 2 }, { "content": "There are bananas on the bottom of the bowl.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera remains stable. A person wearing blue knee-length shorts enters the frame from the left side. This person sits on the sofa facing the table. Then, the person reaches out to unfold the folded pink children's pants on the table and spread them flat on the table. After that, the person holds the pink pants with the left hand and picks up the electric iron with the right hand to start ironing the pants. The iron moves back and forth on the pants. Next, the person turns the pants over and irons them again. Finally, the person puts the iron back on the table and leaves the frame, with the pants still spread out on the table.", "visual_elements": [ { "content": "The camera remains stable.", "type": "camera", "weight": 3 }, { "content": "A person wearing blue knee-length shorts enters the frame from the left side.", "type": "attribute", "weight": 2 }, { "content": "This person sits on the sofa facing the table.", "type": "action", "weight": 2 }, { "content": "The person reaches out to unfold the folded pink children's pants on the table and spread them flat on the table.", "type": "action", "weight": 3 }, { "content": "The person holds the pink pants with the left hand.", "type": "action", "weight": 3 }, { "content": "The person picks up the electric iron with the right hand to start ironing the pants.", "type": "action", "weight": 3 }, { "content": "The iron moves back and forth on the pants.", "type": "action", "weight": 3 }, { "content": "Next, the person turns the pants over and irons them again.", "type": "action", "weight": 3 }, { "content": "The person puts the iron back on the table and leaves the frame.", "type": "action", "weight": 3 }, { "content": "The pants are still spread out on the table.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0489", "video_path": "PerceptionTest/video_2647.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 32.46, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a table covered with a patterned tablecloth. There is a piece of white paper and a brown pen on the table. In the background, a blue and white sofa can be seen. There are striped cushions and beige plush cushions on the sofa. A transparent glass and a plate with a teacup and saucer are placed on the left side of the sofa in the frame, and a yellow box containing red items is placed on the right side of the sofa.\nThe camera shakes slightly. A person wearing a dark purple robe is sitting on the sofa facing the table. There are golden bracelets and rings on this person's left hand. This person picks up the brown pen placed on the paper, removes the cap with the left hand and starts to write on the white paper. 
This person writes down the letters \"C\", \"A\" and \"T\" one after another. After writing, this person puts the pen back on the table. Then, this person picks up the white paper with both hands and shows it to the camera. The word \"CAT\" is clearly written on the paper. Then, this person puts the paper back on the table and takes out some alphabet paper pieces from the sofa. The paper piece \"C\" is blue, the paper piece \"A\" is orange, and the paper piece \"T\" is green. This person places these colored alphabet paper pieces on the white paper one by one to spell the word \"CAT\".", "events": [ { "event": "At the beginning of the video, the camera is focused on a table covered with a patterned tablecloth. There is a piece of white paper and a brown pen on the table. In the background, a blue and white sofa can be seen. There are striped cushions and beige plush cushions on the sofa. A transparent glass and a plate with a teacup and saucer are placed on the left side of the sofa in the frame, and a yellow box containing red items is placed on the right side of the sofa.", "visual_elements": [ { "content": "The camera is focused on a table covered with a patterned tablecloth.", "type": "camera", "weight": 3 }, { "content": "There is a piece of white paper and a brown pen on the table.", "type": "attribute", "weight": 3 }, { "content": "In the background, a blue and white sofa can be seen.", "type": "scene", "weight": 2 }, { "content": "There are striped cushions and beige plush cushions on the sofa.", "type": "attribute", "weight": 2 }, { "content": "A transparent glass and a plate with a teacup and saucer are placed on the left side of the sofa in the frame.", "type": "attribute", "weight": 2 }, { "content": "A yellow box containing red items is placed on the right side of the sofa.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera shakes slightly. A person wearing a dark purple robe is sitting on the sofa facing the table. There are golden bracelets and rings on this person's left hand. This person picks up the brown pen placed on the paper, removes the cap with the left hand and starts to write on the white paper. This person writes down the letters \"C\", \"A\" and \"T\" one after another. After writing, this person puts the pen back on the table. Then, this person picks up the white paper with both hands and shows it to the camera. The word \"CAT\" is clearly written on the paper. Then, this person puts the paper back on the table and takes out some alphabet paper pieces from the sofa. The paper piece \"C\" is blue, the paper piece \"A\" is orange, and the paper piece \"T\" is green. 
This person places these colored alphabet paper pieces on the white paper one by one to spell the word \"CAT\".", "visual_elements": [ { "content": "The camera shakes slightly.", "type": "camera", "weight": 3 }, { "content": "A person wearing a dark purple robe is sitting on the sofa facing the table.", "type": "scene", "weight": 2 }, { "content": "There are golden bracelets and rings on this person's left hand.", "type": "attribute", "weight": 2 }, { "content": "This person picks up the brown pen placed on the paper.", "type": "action", "weight": 3 }, { "content": "This person removes the cap with the left hand and starts to write on the white paper.", "type": "action", "weight": 3 }, { "content": "This person writes down the letters \"C\", \"A\" and \"T\" one after another.", "type": "action", "weight": 3 }, { "content": "After writing, this person puts the pen back on the table.", "type": "action", "weight": 3 }, { "content": "This person picks up the white paper with both hands and shows it to the camera.", "type": "action", "weight": 3 }, { "content": "The word \"CAT\" is clearly written on the paper.", "type": "attribute", "weight": 3 }, { "content": "This person puts the paper back on the table.", "type": "action", "weight": 3 }, { "content": "This person takes out some alphabet paper pieces from the sofa.", "type": "action", "weight": 3 }, { "content": "The paper piece \"C\" is blue.", "type": "attribute", "weight": 1 }, { "content": "The paper piece \"A\" is orange.", "type": "attribute", "weight": 1 }, { "content": "The paper piece \"T\" is green.", "type": "attribute", "weight": 1 }, { "content": "This person places these colored alphabet paper pieces on the white paper one by one to spell the word \"CAT\".", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 21 }, { "index": "TUNA_0490", "video_path": "PerceptionTest/video_2652.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 28.73, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a woman wearing a sleeveless top with a floral pattern. She is wearing a golden necklace, and there is a ring on her left middle finger and another on her left ring finger respectively. In the background, the kitchen dishwasher and coffee machine can be seen. A white shelf is placed on the woman's left rear, and a kettle, wine bottles and other boxes are placed on the shelf.\nThe camera remains stable. Standing at the kitchen countertop, this woman first takes out a packet of tea bags from a red cardboard box with the words \"Chá Misto de Maçã, Abacaxi, Cravo e Canela\" written on it. She opens the tea bag and puts it into a glass cup with a floral pattern. Then, she picks up a teapot with a red floral pattern and starts to pour hot water into the glass cup. After pouring the water, she puts the teapot back on the countertop and then stirs the tea in the cup with a spoon. Then, she scoops out some white sugar from a transparent jar with a black square pattern, where there are red circles in the black squares, and puts the sugar into the tea and stirs again. Finally, she claps her hands. A green tablecloth can be seen on the countertop, with spoons, a red lid and a jar with yellow fruits on top placed on it.", "events": [ { "event": "At the beginning of the video, the camera is focused on a woman wearing a sleeveless top with a floral pattern. 
She is wearing a golden necklace, and there is a ring on her left middle finger and another on her left ring finger respectively. In the background, the kitchen dishwasher and coffee machine can be seen. A white shelf is placed on the woman's left rear, and a kettle, wine bottles and other boxes are placed on the shelf.", "visual_elements": [ { "content": "The camera is focused on a woman wearing a sleeveless top with a floral pattern.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a golden necklace.", "type": "attribute", "weight": 2 }, { "content": "There is a ring on the left middle finger and another on the left ring finger respectively.", "type": "attribute", "weight": 2 }, { "content": "In the background, the kitchen dishwasher and coffee machine can be seen.", "type": "scene", "weight": 2 }, { "content": "A white shelf is placed on the woman's left rear.", "type": "attribute", "weight": 2 }, { "content": "A kettle, wine bottles and other boxes are placed on the shelf.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera remains stable. Standing at the kitchen countertop, this woman first takes out a packet of tea bags from a red cardboard box with the words \"Chá Misto de Maçã, Abacaxi, Cravo e Canela\" written on it. She opens the tea bag and puts it into a glass cup with a floral pattern. Then, she picks up a teapot with a red floral pattern and starts to pour hot water into the glass cup. After pouring the water, she puts the teapot back on the countertop and then stirs the tea in the cup with a spoon. Then, she scoops out some white sugar from a transparent jar with a black square pattern, where there are red circles in the black squares, and puts the sugar into the tea and stirs again. Finally, she claps her hands. 
A green tablecloth can be seen on the countertop, with spoons, a red lid and a jar with yellow fruits on top placed on it.", "visual_elements": [ { "content": "The camera remains stable.", "type": "camera", "weight": 3 }, { "content": "Standing at the kitchen countertop, this woman first takes out a packet of tea bags from a red cardboard box.", "type": "action", "weight": 3 }, { "content": "The words \"Chá Misto de Maçã, Abacaxi, Cravo e Canela\" are written on the box.", "type": "attribute", "weight": 1 }, { "content": "The woman opens the tea bag.", "type": "action", "weight": 3 }, { "content": "The woman puts the tea bag into a glass cup with a floral pattern.", "type": "action", "weight": 3 }, { "content": "The woman picks up a teapot with a red floral pattern.", "type": "action", "weight": 3 }, { "content": "The woman starts to pour hot water into the glass cup.", "type": "action", "weight": 3 }, { "content": "After pouring the water, the woman puts the teapot back on the countertop.", "type": "action", "weight": 3 }, { "content": "The woman stirs the tea in the cup with a spoon.", "type": "action", "weight": 3 }, { "content": "The woman scoops out some white sugar from a transparent jar with a black square pattern.", "type": "action", "weight": 3 }, { "content": "There are red circles in the black squares.", "type": "attribute", "weight": 2 }, { "content": "The woman puts the sugar into the tea and stirs again.", "type": "action", "weight": 3 }, { "content": "The woman claps her hands.", "type": "action", "weight": 3 }, { "content": "A green tablecloth can be seen on the countertop.", "type": "attribute", "weight": 2 }, { "content": "There are spoons, a red lid and a jar with yellow fruits on top placed on the green tablecloth.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 21 }, { "index": "TUNA_0491", "video_path": "PerceptionTest/video_2677.mp4", "video_source": "PerceptionTest", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Daily Life", "duration": 19.04, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a white table with three items on it. A red U-shaped magnet is on the far left, an orange is in the centre and a white round bottle cap is on the far right.\nThe camera then moves down and the items on the tabletop leave the frame. The camera focuses on the floor under the table, which is covered with tiles with pink patterns. Some wires are on the tiles. The bottom of the frame shows the legs of a figure wearing light coloured shorts with patterns.\nThe camera then moves up again and the view returns to the table. The number of items on the table changes from three to four, and a red furry fruit appears in the upper left corner of the red U-shaped magnet.", "events": [ { "event": "The video begins with the camera focusing on a white table with three items on it. 
A red U-shaped magnet is on the far left, an orange is in the centre and a white round bottle cap is on the far right.", "visual_elements": [ { "content": "The camera focuses on a white table.", "type": "camera", "weight": 3 }, { "content": "Three items are on the table.", "type": "attribute", "weight": 2 }, { "content": "A red U-shaped magnet is on the far left.", "type": "attribute", "weight": 3 }, { "content": "An orange is in the centre.", "type": "attribute", "weight": 3 }, { "content": "A white round bottle cap is on the far right.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera then moves down and the items on the tabletop leave the frame. The camera focuses on the floor under the table, which is covered with tiles with pink patterns. Some wires are on the tiles. The bottom of the frame shows the legs of a figure wearing light coloured shorts with patterns.", "visual_elements": [ { "content": "The camera moves down.", "type": "camera", "weight": 3 }, { "content": "The items on the tabletop leave the frame.", "type": "attribute", "weight": 2 }, { "content": "The camera focuses on the floor under the table.", "type": "camera", "weight": 3 }, { "content": "The floor is covered with tiles with pink patterns.", "type": "attribute", "weight": 3 }, { "content": "Some wires are on the tiles.", "type": "attribute", "weight": 1 }, { "content": "The bottom of the frame shows the legs of a figure wearing light coloured shorts with patterns.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera then moves up again and the view returns to the table. The number of items on the table changes from three to four, and a red furry fruit appears in the upper left corner of the red U-shaped magnet.", "visual_elements": [ { "content": "The camera moves up again.", "type": "camera", "weight": 3 }, { "content": "The view returns to the table.", "type": "camera", "weight": 3 }, { "content": "The number of items on the table changes from three to four.", "type": "attribute", "weight": 3 }, { "content": "A red furry fruit appears in the upper left corner of the red U-shaped magnet.", "type": "attribute", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0492", "video_path": "PerceptionTest/video_2701.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 34.09, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on the floor, which is covered in white tiles with patterns of small blue squares. The feet of a person wearing black trousers and black sandals can be seen at the top of the frame. To the left of the frame is a white wall. A white socket on the wall is in the bottom left corner of the frame. Some tools are scattered on the floor against the wall, and on the far left is a plug with a cord. To the right of the plug is a screwdriver with an orange handle. To the right of the screwdriver is a black power tool with its cord attached to the plug. On the far right is a roll of duct tape.\nThe person at the top of the frame then bends down to pick up the orange-handled screwdriver with his right hand. The person uses the screwdriver to pull the tape closer to him, and then uses the screwdriver to push the tape back in place. The person then puts the screwdriver back in place. The person picks up the plug and inserts it into the socket on the wall. 
The person then picks up the screwdriver again and turns the screwdriver by hand a few times before putting it down again. The person then reaches out his hand to adjust the position of the tape. The person then picks up the black power tool and presses the switch. After waiting a few seconds, the person presses the button again to turn off the power tool and put it back in place. The video ends with the person reaching hands toward the upper right corner of the frame.", "events": [ { "event": "The video begins with the camera focusing on the floor, which is covered in white tiles with patterns of small blue squares. The feet of a person wearing black trousers and black sandals can be seen at the top of the frame. To the left of the frame is a white wall. A white socket on the wall is in the bottom left corner of the frame. Some tools are scattered on the floor against the wall, and on the far left is a plug with a cord. To the right of the plug is a screwdriver with an orange handle. To the right of the screwdriver is a black power tool with its cord attached to the plug. On the far right is a roll of duct tape.", "visual_elements": [ { "content": "The camera focuses on the floor.", "type": "camera", "weight": 3 }, { "content": "The floor is covered in white tiles with patterns of small blue squares.", "type": "scene", "weight": 2 }, { "content": "The feet of a person wearing black trousers and black sandals can be seen at the top of the frame.", "type": "attribute", "weight": 2 }, { "content": "To the left of the frame is a white wall.", "type": "scene", "weight": 2 }, { "content": "A white socket on the wall is in the bottom left corner of the frame.", "type": "attribute", "weight": 1 }, { "content": "Some tools are scattered on the floor against the wall.", "type": "attribute", "weight": 3 }, { "content": "On the far left is a plug with a cord.", "type": "attribute", "weight": 3 }, { "content": "To the right of the plug is a screwdriver with an orange handle.", "type": "attribute", "weight": 3 }, { "content": "To the right of the screwdriver is a black power tool.", "type": "attribute", "weight": 3 }, { "content": "The cord of the black power tool is attached to the plug.", "type": "attribute", "weight": 2 }, { "content": "On the far right is a roll of duct tape.", "type": "attribute", "weight": 3 } ] }, { "event": "The person at the top of the frame then bends down to pick up the orange-handled screwdriver with his right hand. The person uses the screwdriver to pull the tape closer to him, and then uses the screwdriver to push the tape back in place. The person then puts the screwdriver back in place. The person picks up the plug and inserts it into the socket on the wall. The person then picks up the screwdriver again and turns the screwdriver by hand a few times before putting it down again. The person then reaches out his hand to adjust the position of the tape. The person then picks up the black power tool and presses the switch. After waiting a few seconds, the person presses the button again to turn off the power tool and put it back in place. 
The video ends with the person reaching his hands toward the upper right corner of the frame.", "visual_elements": [ { "content": "The person at the top of the frame then bends down to pick up the orange-handled screwdriver with his right hand.", "type": "action", "weight": 3 }, { "content": "The person uses the screwdriver to pull the tape closer to him, and then uses the screwdriver to push the tape back in place.", "type": "action", "weight": 3 }, { "content": "The person then puts the screwdriver back in place.", "type": "action", "weight": 2 }, { "content": "The person picks up the plug and inserts it into the socket on the wall.", "type": "action", "weight": 3 }, { "content": "The person picks up the screwdriver again and turns the screwdriver by hand a few times before putting it down again.", "type": "action", "weight": 3 }, { "content": "The person reaches out his hand to adjust the position of the tape.", "type": "action", "weight": 2 }, { "content": "The person then picks up the black power tool and presses the switch.", "type": "action", "weight": 3 }, { "content": "After waiting a few seconds, the person presses the button again to turn off the power tool and put it back in place.", "type": "action", "weight": 3 }, { "content": "The video ends with the person reaching his hands toward the upper right corner of the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 20 }, { "index": "TUNA_0493", "video_path": "PerceptionTest/video_2708.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 25.06, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a table with an orange milk carton on the far right of the table. Two eggs are on the left of the milk carton, and an onion is on the left of the eggs. In the middle of the background is a wall, and to the left of the wall is a white frosted door. To the right of the wall is a window with white lace curtains.\nThen a hand appears on the right side of the frame holding a red pan. The owner of the hand shows the camera the front and back of the pan. The person then places the pan on the tabletop behind the onion. The person's hand then enters from the right side of the frame holding a black remote control. The person starts to put the remote into the pan but then changes the direction of his hand. Instead, the person places the remote in the empty space between the pan and the onion.\nThe person then brings in a pair of dark grey shorts with an orange waistband from the right side of the frame. The person folds the shorts twice and then places them in the pan. The person then enters from the right side of the frame with another fork and a knife. The person places the fork and the knife on the table in front of the onion. At the end of the video, the person extends his right hand to the left side of the frame.", "events": [ { "event": "The video begins with the camera focusing on a table with an orange milk carton on the far right of the table. Two eggs are on the left of the milk carton, and an onion is on the left of the eggs. In the middle of the background is a wall, and to the left of the wall is a white frosted door. 
To the right of the wall is a window with white lace curtains.", "visual_elements": [ { "content": "The camera focuses on a table.", "type": "camera", "weight": 3 }, { "content": "An orange milk carton is on the far right of the table.", "type": "attribute", "weight": 3 }, { "content": "Two eggs are on the left of the milk carton.", "type": "attribute", "weight": 3 }, { "content": "An onion is on the left of the eggs.", "type": "attribute", "weight": 3 }, { "content": "In the middle of the background is a wall.", "type": "scene", "weight": 2 }, { "content": "To the left of the wall is a white frosted door.", "type": "scene", "weight": 1 }, { "content": "To the right of the wall is a window.", "type": "scene", "weight": 2 }, { "content": "White lace curtains hang at the window.", "type": "scene", "weight": 2 } ] }, { "event": "Then a hand appears on the right side of the frame holding a red pan. The owner of the hand shows the camera the front and back of the pan. The person then places the pan on the tabletop behind the onion. The person's hand then enters from the right side of the frame holding a black remote control. The person starts to put the remote into the pan but then changes the direction of his hand. Instead, the person places the remote in the empty space between the pan and the onion.", "visual_elements": [ { "content": "A hand appears on the right side of the frame.", "type": "attribute", "weight": 3 }, { "content": "The hand holds a red pan.", "type": "attribute", "weight": 3 }, { "content": "The owner of the hand shows the camera the front and back of the pan.", "type": "action", "weight": 3 }, { "content": "The person places the pan on the tabletop behind the onion.", "type": "action", "weight": 3 }, { "content": "The person's hand enters from the right side of the frame holding a black remote control.", "type": "action", "weight": 3 }, { "content": "The person starts to put the remote into the pan but then changes the direction of his hand.", "type": "action", "weight": 3 }, { "content": "Instead, the person places the remote in the empty space between the pan and the onion.", "type": "action", "weight": 3 } ] }, { "event": "The person then brings in a pair of dark grey shorts with an orange waistband from the right side of the frame. The person folds the shorts twice and then places them in the pan. The person then enters from the right side of the frame with another fork and a knife. The person places the fork and the knife on the table in front of the onion. 
At the end of the video, the person extends his right hand to the left side of the frame.", "visual_elements": [ { "content": "The person then brings in a pair of dark grey shorts with an orange waistband from the right side of the frame.", "type": "action", "weight": 3 }, { "content": "The person folds the shorts twice and then places them in the pan.", "type": "action", "weight": 3 }, { "content": "The person enters from the right side of the frame with another fork and a knife.", "type": "action", "weight": 3 }, { "content": "The person places the fork and the knife on the table in front of the onion.", "type": "action", "weight": 3 }, { "content": "At the end of the video, the person extends his right hand to the left side of the frame.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0494", "video_path": "PerceptionTest/video_2763.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Daily Life", "duration": 34.97, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a wooden door with a black clothesline across it. Hanging from the line is a black coat hanger, on which hangs a black backpack. A silver fork is stuck on the coat hanger. On the wall to the right of the door hangs a beige crossbody bag with the word \"BAYFLOW\" printed on it.\nA hand holding a black coat hanger enters the frame from the right side of the screen. Two seconds later, the hand with the coat hanger leaves from the right side of the frame. A hand holding a notebook with a brown cover enters from the right side of the frame. On the cover of the notebook is a yellow sticker with a black car on it. The hand adjusts the position of the notebook in front of the camera. Then the hand holding the notebook leaves from the right side of the frame.\nA hand holding a book with a yellow cover enters the frame from the right side of the screen. The words \"Pokemon Omnibus\" and various cartoon characters are on the cover. The hand adjusts the position of the book in front of the camera and then leaves with the book from the right side of the frame. A hand holding a book with a blue cover then enters from the right side of the frame. A bat-winged cartoon figure and some Japanese characters are on the cover. The hand adjusts the position of the book in front of the camera and then leaves with the book from the right side of the frame.\nThen a hand holding a black mobile phone enters the frame from the right side of the screen. The back of the phone faces the camera and an apple logo is on the back of the phone. Two seconds later, the hand leaves with the mobile phone from the right side of the frame. Finally, a hand holding a green cucumber enters the frame from the right side of the screen. The hand displays the cucumber in front of the camera and then leaves with the cucumber from the right side of the frame.", "events": [ { "event": "The video begins with the camera focusing on a wooden door with a black clothesline across it. Hanging from the line is a black coat hanger, on which hangs a black backpack. A silver fork is stuck on the coat hanger. 
On the wall to the right of the door hangs a beige crossbody bag with the word \"BAYFLOW\" printed on it.", "visual_elements": [ { "content": "The camera focuses on a wooden door.", "type": "camera", "weight": 3 }, { "content": "A black clothesline stretches across the door.", "type": "attribute", "weight": 2 }, { "content": "Hanging from the line is a black coat hanger.", "type": "attribute", "weight": 3 }, { "content": "A black backpack hangs from the hanger.", "type": "attribute", "weight": 3 }, { "content": "A silver fork is stuck on the coat hanger.", "type": "attribute", "weight": 2 }, { "content": "On the wall to the right of the door hangs a beige crossbody bag.", "type": "attribute", "weight": 2 }, { "content": "The beige crossbody bag has the word \"BAYFLOW\" printed on it.", "type": "attribute", "weight": 2 } ] }, { "event": "A hand holding a black coat hanger enters the frame from the right side of the screen. Two seconds later, the hand with the coat hanger leaves from the right side of the frame. A hand holding a notebook with a brown cover enters from the right side of the frame. On the cover of the notebook is a yellow sticker with a black car on it. The hand adjusts the position of the notebook in front of the camera. Then the hand holding the notebook leaves from the right side of the frame.", "visual_elements": [ { "content": "A hand holding a black coat hanger enters the frame from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "Two seconds later, the hand with the coat hanger leaves from the right side of the frame.", "type": "action", "weight": 3 }, { "content": "A hand holding a notebook with a brown cover enters from the right side of the frame.", "type": "action", "weight": 3 }, { "content": "On the cover of the notebook is a yellow sticker with a black car on it.", "type": "attribute", "weight": 2 }, { "content": "The hand adjusts the position of the notebook in front of the camera.", "type": "action", "weight": 2 }, { "content": "The hand holding the notebook leaves from the right side of the frame.", "type": "action", "weight": 3 } ] }, { "event": "A hand holding a book with a yellow cover enters the frame from the right side of the screen. The words \"Pokemon Omnibus\" and various cartoon characters are on the cover. The hand adjusts the position of the book in front of the camera and then leaves with the book from the right side of the frame. A hand holding a book with a blue cover then enters from the right side of the frame. A bat-winged cartoon figure and some Japanese characters are on the cover. 
The hand adjusts the position of the book in front of the camera and then leaves with the book from the right side of the frame.", "visual_elements": [ { "content": "A hand holding a book with a yellow cover enters the frame from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The words \"Pokemon Omnibus\" and various cartoon characters are on the cover.", "type": "attribute", "weight": 2 }, { "content": "The hand adjusts the position of the book in front of the camera and then leaves with the book from the right side of the frame.", "type": "action", "weight": 3 }, { "content": "A hand holding a book with a blue cover then enters from the right side of the frame.", "type": "action", "weight": 3 }, { "content": "A bat-winged cartoon figure and some Japanese characters are on the cover.", "type": "attribute", "weight": 2 }, { "content": "The hand adjusts the position of the book in front of the camera and then leaves with the book from the right side of the frame.", "type": "action", "weight": 3 } ] }, { "event": "Then a hand holding a black mobile phone enters the frame from the right side of the screen. The back of the phone faces the camera and an apple logo is on the back of the phone. Two seconds later, the hand leaves with the mobile phone from the right side of the frame. Finally, a hand holding a green cucumber enters the frame from the right side of the screen. The hand displays the cucumber in front of the camera and then leaves with the cucumber from the right side of the frame.", "visual_elements": [ { "content": "A hand holding a black mobile phone enters the frame from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The back of the phone faces the camera.", "type": "attribute", "weight": 2 }, { "content": "An apple logo is on the back of the phone.", "type": "attribute", "weight": 1 }, { "content": "Two seconds later, the hand leaves with the mobile phone from the right side of the frame.", "type": "action", "weight": 3 }, { "content": "A hand holding a green cucumber enters the frame from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The hand displays the cucumber in front of the camera and then leaves with the cucumber from the right side of the frame.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0495", "video_path": "PerceptionTest/video_2768.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 20.26, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera looking down on a green table. At the top of the frame a person can be seen sitting behind the table. The person puts his hands separately on the tabletop. Two items are placed on the tabletop between the two hands. Near the right hand is a black ballpoint pen and near the left hand is a white tissue.\nThe hands first complete several consecutive claps above the table. Then the hands put on the tissue and the ballpoint pen respectively. The right hand then picks up the ballpoint pen to tap the tabletop, while the left hand presses a corner of the tissue.\nAfter the right hand stops moving, the hands start folding the tissue paper. First, the left hand holds down a corner of the tissue while the right hand folds the tissue paper in half to form a triangle. Then, the hands fold a small right triangle of the triangular tissue inwards to form a right-angled trapezoid. 
Next, the hands fold the remaining part of the tissue inwards to form a smaller right-angled trapezoid. Finally, the hands place the folded tissue in the centre of the table before the hands rest next to the tissue and the ballpoint pen respectively.", "events": [ { "event": "The video begins with the camera looking down on a green table. At the top of the frame a person can be seen sitting behind the table. The person puts his hands separately on the tabletop. Two items are placed on the tabletop between the two hands. Near the right hand is a black ballpoint pen and near the left hand is a white tissue.", "visual_elements": [ { "content": "The camera looks down on a green table.", "type": "camera", "weight": 3 }, { "content": "At the top of the frame a person can be seen sitting behind the table.", "type": "scene", "weight": 1 }, { "content": "The person puts his hands separately on the tabletop.", "type": "attribute", "weight": 3 }, { "content": "Two items are placed on the tabletop between the two hands.", "type": "attribute", "weight": 2 }, { "content": "Near the right hand is a black ballpoint pen.", "type": "attribute", "weight": 3 }, { "content": "Near the left hand is a white tissue.", "type": "attribute", "weight": 3 } ] }, { "event": "The hands first complete several consecutive claps above the table. Then the hands rest on the tissue and the ballpoint pen respectively. The right hand then picks up the ballpoint pen to tap the tabletop, while the left hand presses a corner of the tissue.", "visual_elements": [ { "content": "The hands first complete several consecutive claps above the table.", "type": "action", "weight": 3 }, { "content": "The hands rest on the tissue and the ballpoint pen respectively.", "type": "action", "weight": 2 }, { "content": "The right hand picks up the ballpoint pen.", "type": "action", "weight": 3 }, { "content": "The left hand presses a corner of the tissue.", "type": "action", "weight": 3 } ] }, { "event": "After the right hand stops moving, the hands start folding the tissue paper. First, the left hand holds down a corner of the tissue while the right hand folds the tissue paper in half to form a triangle. Then, the hands fold a small right triangle of the triangular tissue inwards to form a right-angled trapezoid. Next, the hands fold the remaining part of the tissue inwards to form a smaller right-angled trapezoid. 
Finally, the hands place the folded tissue in the centre of the table before the hands rest next to the tissue and the ballpoint pen respectively.", "visual_elements": [ { "content": "After the right hand stops moving, both hands start folding the tissue paper.", "type": "action", "weight": 2 }, { "content": "First, the left hand holds down a corner of the tissue while the right hand folds the tissue paper in half to form a triangle.", "type": "action", "weight": 3 }, { "content": "The hands fold a small right triangle of the triangular tissue inwards to form a right-angled trapezoid.", "type": "action", "weight": 3 }, { "content": "The hands fold the remaining part of the tissue inwards to form a smaller right-angled trapezoid.", "type": "action", "weight": 3 }, { "content": "Finally, the hands place the folded tissue in the centre of the table.", "type": "action", "weight": 2 }, { "content": "The hands rest next to the tissue and the ballpoint pen respectively.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0496", "video_path": "PerceptionTest/video_278.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 28.59, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a light grey wooden tabletop. In the middle of the table is a piece of white paper with horizontal lines. To the left of the lined paper is a pair of yellow scissors and a red remote control. On the tabletop, in front of the paper are a yellow and black screwdriver, a marker, and a silver flashlight. On the right side of the frame, a hand reaches for the items above the lined paper.\nThe hand on the right side of the frame picks up the marker. Holding the pen in the right hand, the left hand enters from the bottom of the frame and removes the cap of the marker. Next, the left hand fixes the white lined paper and the right hand takes the pen and starts writing from left to right on the lined paper. Finally, the hand writes the word \"MAT\" on the paper. After writing, the left hand puts the cap back on the pen and the right hand puts the pen back in its original position. Next, the hands pick up the paper and approach the camera to show the word \"MAT\" on it. Then, the hands put the paper back on the table and leave the frame.\nThe hand then takes out a magnet in the shape of a dark green letter \"M\" from the right side of the frame and places it on the right side of the paper. Next, the hand takes out a magnet in the shape of a purple letter \"O\" and places it next to the green letter \"M\". Finally, the hand takes out a magnet in the shape of a green letter \"T\" and places it next to the purple letter \"O\" to form the word \"MOT\". The hand then leaves the screen.", "events": [ { "event": "The video begins with the camera focusing on a light grey wooden tabletop. In the middle of the table is a piece of white paper with horizontal lines. To the left of the lined paper is a pair of yellow scissors and a red remote control. On the tabletop, in front of the paper are a yellow and black screwdriver, a marker, and a silver flashlight. 
On the right side of the frame, a hand reaches for the items above the lined paper.", "visual_elements": [ { "content": "The video begins with the camera focusing on a light grey wooden tabletop.", "type": "camera", "weight": 3 }, { "content": "In the middle of the table is a piece of white paper with horizontal lines.", "type": "attribute", "weight": 3 }, { "content": "To the left of the lined paper is a pair of yellow scissors and a red remote control.", "type": "attribute", "weight": 2 }, { "content": "On the tabletop, in front of the paper are a yellow and black screwdriver, a marker, and a silver flashlight.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the frame, a hand reaches for the items above the lined paper.", "type": "action", "weight": 2 } ] }, { "event": "The hand on the right side of the frame picks up the marker. Holding the pen in the right hand, the left hand enters from the bottom of the frame and removes the cap of the marker. Next, the left hand fixes the white lined paper and the right hand takes the pen and starts writing from left to right on the lined paper. Finally, the hand writes the word \"MAT\" on the paper. After writing, the left hand puts the cap back on the pen and the right hand puts the pen back in its original position. Next, the hands pick up the paper and approach the camera to show the word \"MAT\" on it. Then, the hands put the paper back on the table and leave the frame.", "visual_elements": [ { "content": "The hand on the right side of the frame picks up the marker.", "type": "action", "weight": 3 }, { "content": "Holding the pen in the right hand, the left hand enters from the bottom of the frame and removes the cap of the marker.", "type": "action", "weight": 3 }, { "content": "Next, the left hand fixes the white lined paper and the right hand takes the pen and starts writing from left to right on the lined paper.", "type": "action", "weight": 3 }, { "content": "Finally, the hand writes the word \"MAT\" on the paper.", "type": "attribute", "weight": 3 }, { "content": "After writing, the left hand puts the cap back on the pen and the right hand puts the pen back in its original position.", "type": "action", "weight": 3 }, { "content": "Next, the hands pick up the paper and approach the camera to show the word \"MAT\" on it.", "type": "action", "weight": 3 }, { "content": "Then, the hands put the paper back on the table and leave the frame.", "type": "action", "weight": 2 } ] }, { "event": "The hand then takes out a magnet in the shape of a dark green letter \"M\" from the right side of the frame and places it on the right side of the paper. Next, the hand takes out a magnet in the shape of a purple letter \"O\" and places it next to the green letter \"M\". Finally, the hand takes out a magnet in the shape of a green letter \"T\" and places it next to the purple letter \"O\" to form the word \"MOT\". 
The hand then leaves the screen.", "visual_elements": [ { "content": "The hand then takes out a magnet in the shape of a dark green letter \"M\" from the right side of the frame and places it on the right side of the paper.", "type": "action", "weight": 3 }, { "content": "Next, the hand takes out a magnet in the shape of a purple letter \"O\" and places it next to the green letter \"M\".", "type": "action", "weight": 3 }, { "content": "Finally, the hand takes out a magnet in the shape of a green letter \"T\" and places it next to the purple letter \"O\" to form the word \"MOT\".", "type": "action", "weight": 3 }, { "content": "The hand leaves the screen.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0497", "video_path": "PerceptionTest/video_2785.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 12.86, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a wooden coffee table. On the tabletop near the camera is a black and grey hair band. A black remote control sits diagonally opposite the hair band. In the background, a white television cabinet can be seen, with a yellow carpet on the floor in front of it, and a fallen pink toy pig and the wheels of a child's toy car in the back. On the right side of the frame is a foot wearing black trousers and the owner of the foot sits by the coffee table.\nA hand reaches out from the right side of the frame. The hand holds a cup with black and white patterns and places the cup on the coffee table. The hand departs from the right side of the frame and the hand then repeats the action of placing the cup three more times. Four identical cups are lined up on the coffee table. The last cup placed on the coffee table has the bottom facing up. The other three cups face up.", "events": [ { "event": "The video begins with the camera focusing on a wooden coffee table. On the tabletop near the camera is a black and grey hair band. A black remote control sits diagonally opposite the hair band. In the background, a white television cabinet can be seen, with a yellow carpet on the floor in front of it, and a fallen pink toy pig and the wheels of a child's toy car in the back. On the right side of the frame is a foot wearing black trousers and the owner of the foot sits by the coffee table.", "visual_elements": [ { "content": "The camera focuses on a wooden coffee table.", "type": "camera", "weight": 3 }, { "content": "On the tabletop near the camera is a black and grey hair band.", "type": "attribute", "weight": 3 }, { "content": "A black remote control sits diagonally opposite the hair band.", "type": "attribute", "weight": 3 }, { "content": "In the background, a white television cabinet can be seen.", "type": "scene", "weight": 2 }, { "content": "A yellow carpet is on the floor in front of the white television cabinet.", "type": "scene", "weight": 2 }, { "content": "A fallen pink toy pig and the wheels of a child's toy car are in the back.", "type": "scene", "weight": 1 }, { "content": "On the right side of the frame is a foot wearing black trousers.", "type": "attribute", "weight": 2 }, { "content": "The owner of the foot sits by the coffee table.", "type": "attribute", "weight": 1 } ] }, { "event": "A hand reaches out from the right side of the frame. The hand holds a cup with black and white patterns and places the cup on the coffee table. 
The hand departs from the right side of the frame and the hand then repeats the action of placing the cup three more times. Four identical cups are lined up on the coffee table. The last cup placed on the coffee table has the bottom facing up. The other three cups face up.", "visual_elements": [ { "content": "A hand reaches out from the right side of the frame.", "type": "action", "weight": 3 }, { "content": "The hand holds a cup with black and white patterns.", "type": "attribute", "weight": 3 }, { "content": "The hand places the cup on the coffee table.", "type": "action", "weight": 3 }, { "content": "The hand departs from the right side of the frame.", "type": "action", "weight": 2 }, { "content": "The hand repeats the action of placing the cup three more times.", "type": "action", "weight": 3 }, { "content": "Four identical cups are lined up on the coffee table.", "type": "attribute", "weight": 3 }, { "content": "The last cup placed on the coffee table has the bottom facing up.", "type": "attribute", "weight": 3 }, { "content": "The other three cups face up.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0498", "video_path": "PerceptionTest/video_2791.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 17.77, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a man wearing a light coloured t-shirt and khaki shorts stands on the left side of the frame. Behind the man is a solid wood cabinet. To the right of the man is a white door, and in front of the man is a round red wooden table. There are two rows of items on the table. In the row near the camera, from left to right, are a small plastic bottle with a black cap, a black iron box and a black bottle. The second row, from left to right, consists of a green plastic cup, a transparent plastic jar with a blue lid and a transparent plastic jar with a pink lid.\nThe man first picks up the green plastic cup and holds it in his right hand as he unscrews the lid with his left hand. He then places the lid and the plastic cup on the table separately. Next, the man extends his left hand and turns the small plastic jar with the black lid. The man then picks up the transparent plastic jar with the blue lid to unscrew the lid before he places the lid and jar separately on the table. Next, the man picks up another transparent plastic jar with the pink lid to unscrew the lid before he places the lid and jar separately on the table. The man withdraws his hands and stands by the table with his hands hanging by his side.", "events": [ { "event": "At the beginning of the video, a man wearing a light coloured t-shirt and khaki shorts stands on the left side of the frame. Behind the man is a solid wood cabinet. To the right of the man is a white door, and in front of the man is a round red wooden table. There are two rows of items on the table. In the row near the camera, from left to right, are a small plastic bottle with a black cap, a black iron box and a black bottle. 
The second row, from left to right, consists of a green plastic cup, a transparent plastic jar with a blue lid and a transparent plastic jar with a pink lid.", "visual_elements": [ { "content": "A man wearing a light coloured t-shirt and khaki shorts stands on the left side of the frame.", "type": "attribute", "weight": 3 }, { "content": "Behind the man is a solid wood cabinet.", "type": "scene", "weight": 1 }, { "content": "To the right of the man is a white door.", "type": "scene", "weight": 1 }, { "content": "In front of the man is a round red wooden table.", "type": "attribute", "weight": 3 }, { "content": "There are two rows of items on the table.", "type": "attribute", "weight": 2 }, { "content": "In the row near the camera, from left to right, are a small plastic bottle with a black cap, a black iron box and a black bottle.", "type": "attribute", "weight": 3 }, { "content": "The second row, from left to right, consists of a green plastic cup, a transparent plastic jar with a blue lid and a transparent plastic jar with a pink lid.", "type": "attribute", "weight": 3 } ] }, { "event": "The man first picks up the green plastic cup and holds it in his right hand as he unscrews the lid with his left hand. He then places the lid and the plastic cup on the table separately. Next, the man extends his left hand and turns the small plastic jar with the black lid. The man then picks up the transparent plastic jar with the blue lid to unscrew the lid before he places the lid and jar separately on the table. Next, the man picks up another transparent plastic jar with the pink lid to unscrew the lid before he places the lid and jar separately on the table. The man withdraws his hands and stands by the table with his hands hanging by his side.", "visual_elements": [ { "content": "The man first picks up the green plastic cup.", "type": "action", "weight": 3 }, { "content": "The man holds the cup in his right hand as he unscrews the lid with his left hand.", "type": "action", "weight": 3 }, { "content": "The man then places the lid and the plastic cup on the table separately.", "type": "action", "weight": 3 }, { "content": "Next, the man extends his left hand and turns the small plastic jar with the black lid.", "type": "action", "weight": 3 }, { "content": "The man picks up the transparent plastic jar with the blue lid.", "type": "action", "weight": 3 }, { "content": "The man unscrews the lid before he places the lid and jar separately on the table.", "type": "action", "weight": 3 }, { "content": "The man picks up another transparent plastic jar with the pink lid.", "type": "action", "weight": 3 }, { "content": "The man also unscrews the lid before he places the lid and jar separately on the table.", "type": "action", "weight": 3 }, { "content": "The man withdraws his hands and stands by the table with his hands hanging by his side.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0499", "video_path": "PerceptionTest/video_2813.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 23.06, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera looking down on a small wooden table with a dividing line in the centre of the table. To the left of the dividing line sits a green thermos. To the left of the thermos is a small cucumber. To the left of the dividing line, on the tabletop near the camera is another white cup with black patterns, and a fork is on the left of the cup. 
A spoon lies across the dividing line. On the tabletop to the right of the dividing line is a transparent glass jar with tea bags, a white bowl and a small spice bottle. In the background is a floor with light-coloured tiles with patterns of small blue squares. A person in khaki trousers stands behind the small table.\nThe person's hands enter from the top of the frame and first open the lid of the glass jar and then take out a tea bag from the jar. The right hand puts the tea bag into the white cup and the left hand puts the lid back on the glass jar. Next, the right hand picks up the green thermos and pours hot water into the cup. After the hot water is poured into the cup, the tea bag steeps in the water and the water gradually changes colour. While pouring the water, the left hand picks up the small cucumber and places it in the white bowl on the right of the dividing line. After pouring the water, the right hand puts down the thermos and the left hand picks up the spoon and passes it to the right hand. The right hand takes the spoon and stirs the tea in the cup. After the stirring is finished, the spoon stays in the cup and the hands leave the frame.", "events": [ { "event": "The video begins with the camera looking down on a small wooden table with a dividing line in the centre of the table. To the left of the dividing line sits a green thermos. To the left of the thermos is a small cucumber. To the left of the dividing line, on the tabletop near the camera is another white cup with black patterns, and a fork is on the left of the cup. A spoon lies across the dividing line. On the tabletop to the right of the dividing line is a transparent glass jar with tea bags, a white bowl and a small spice bottle. In the background is a floor with light-coloured tiles with patterns of small blue squares. A person in khaki trousers stands behind the small table.", "visual_elements": [ { "content": "The camera looks down on a small wooden table.", "type": "camera", "weight": 3 }, { "content": "The small wooden table has a dividing line in the centre of the table.", "type": "attribute", "weight": 2 }, { "content": "To the left of the dividing line sits a green thermos.", "type": "attribute", "weight": 3 }, { "content": "To the left of the thermos is a small cucumber.", "type": "attribute", "weight": 2 }, { "content": "To the left of the dividing line, on the tabletop near the camera is another white cup with black patterns.", "type": "attribute", "weight": 3 }, { "content": "A fork is on the left of the cup.", "type": "attribute", "weight": 2 }, { "content": "A spoon lies across the dividing line.", "type": "attribute", "weight": 2 }, { "content": "On the tabletop to the right of the dividing line is a transparent glass jar with tea bags, a white bowl and a small spice bottle.", "type": "attribute", "weight": 3 }, { "content": "In the background is a floor with light-coloured tiles with patterns of small blue squares.", "type": "scene", "weight": 1 }, { "content": "A person in khaki trousers stands behind the small table.", "type": "attribute", "weight": 2 } ] }, { "event": "The person's hands enter from the top of the frame and first open the lid of the glass jar and then take out a tea bag from the jar. The right hand puts the tea bag into the white cup and the left hand puts the lid back on the glass jar. Next, the right hand picks up the green thermos and pours hot water into the cup. 
After the hot water is poured into the cup, the tea bag steeps in the water and the water gradually changes colour. While pouring the water, the left hand picks up the small cucumber and places it in the white bowl on the right of the dividing line. After pouring the water, the right hand puts down the thermos and the left hand picks up the spoon and passes it to the right hand. The right hand takes the spoon and stirs the tea in the cup. After the stirring is finished, the spoon stays in the cup and the hands leave the frame.", "visual_elements": [ { "content": "The person's hands enter from the top of the frame.", "type": "action", "weight": 3 }, { "content": "The hand then takes out a tea bag from the jar.", "type": "action", "weight": 3 }, { "content": "The right hand puts the tea bag into the white cup and the left hand puts the lid back on the glass jar.", "type": "action", "weight": 3 }, { "content": "The right hand picks up the green thermos and pours hot water into the cup.", "type": "action", "weight": 3 }, { "content": "After the hot water is poured into the cup, the tea bag steeps in the water and the water gradually changes colour.", "type": "attribute", "weight": 2 }, { "content": "While pouring the water, the left hand picks up the small cucumber and places it in the white bowl on the right of the dividing line.", "type": "action", "weight": 3 }, { "content": "After pouring the water, the right hand puts down the thermos and the left hand picks up the spoon and passes it to the right hand.", "type": "action", "weight": 3 }, { "content": "The right hand takes the spoon and stirs the tea in the cup.", "type": "action", "weight": 3 }, { "content": "After the stirring is finished, the spoon stays in the cup and the hands leave the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 19 }, { "index": "TUNA_0500", "video_path": "PerceptionTest/video_2834.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 25.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on a wooden table. A knife with a black handle rests vertically on the left side of the table, with the blade facing inwards. A blue ballpoint pen rests horizontally on the handle of the knife. A book with a green cover rests across the top right of the knife. Under the book is a walnut. A hand in a blue long sleeve rests on the book and is about to pick it up.\nThe hand holds the book upright in front of the walnut and moves it slightly from side to side. Then, the hand slowly releases the book and leaves from the right side of the frame as the book stands independently on the table. The hand then enters the frame again and holds the book to move it from side to side. Then, the hand picks up the book horizontally and the walnut behind the book reappears in the frame.", "events": [ { "event": "The camera focuses on a wooden table. A knife with a black handle rests vertically on the left side of the table, with the blade facing inwards. A blue ballpoint pen rests horizontally on the handle of the knife. A book with a green cover rests across the top right of the knife. Under the book is a walnut. 
A hand in a blue long sleeve rests on the book and is about to pick it up.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "A knife with a black handle rests vertically on the left side of the table.", "type": "attribute", "weight": 3 }, { "content": "The blade faces inwards.", "type": "attribute", "weight": 1 }, { "content": "A blue ballpoint pen rests horizontally on the handle of the knife.", "type": "attribute", "weight": 2 }, { "content": "A book with a green cover rests across the top right of the knife.", "type": "attribute", "weight": 3 }, { "content": "Under the book is a walnut.", "type": "attribute", "weight": 2 }, { "content": "A hand in a blue long sleeve rests on the book and is about to pick it up.", "type": "attribute", "weight": 3 } ] }, { "event": "The hand holds the book upright in front of the walnut and moves it slightly from side to side. Then, the hand slowly releases the book and leaves from the right side of the frame as the book stands independently on the table. The hand then enters the frame again and holds the book to move it from side to side. Then, the hand picks up the book horizontally and the walnut behind the book reappears in the frame.", "visual_elements": [ { "content": "The hand holds the book upright in front of the walnut and moves it slightly from side to side.", "type": "action", "weight": 3 }, { "content": "The hand slowly releases the book and leaves from the right side of the frame.", "type": "action", "weight": 3 }, { "content": "The book stands independently on the table.", "type": "attribute", "weight": 3 }, { "content": "The hand enters the frame again.", "type": "action", "weight": 2 }, { "content": "The hand holds the book to move it from side to side.", "type": "action", "weight": 3 }, { "content": "The hand picks up the book horizontally.", "type": "action", "weight": 3 }, { "content": "The walnut behind the book reappears in the frame.", "type": "attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0501", "video_path": "PerceptionTest/video_2838.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 32.14, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a table with a red tablecloth and a row of items on the side of the table near the camera. On the far left is a white bottle. To the right of the white bottle sits an orange ball. In the very middle is a small red jar and a white cup with patterns. The red jar has a white substance inside and a spoon stuck in it. On the far right is an empty Coke bottle with a blue straw. Behind the Coke bottle is a black pen. In the upper left corner of the table is a glass filled with water. In the background is a couch with a patterned blanket. Between the sofa and the table stands a man.\nThe person in the middle sits down on the couch. This person wears a black coat and black trousers. The man places his hands palm down on the table. Then, his hands first move above the table and clap twice in succession. Then, he picks up the black pen on the right side of the table with his left hand. The tip of the pen strikes the table several times. He puts the pen back in its place with his left hand.\nHis right hand picks up the spoon in the jar to scoop up the white substance from the small red jar and pour it into the cup on the right of the jar. 
The person pours a total of three spoons of the white substance from the jar into the cup. His right hand picks up the orange ball and throws it into the left hand. The orange ball moves back and forth between the right and left hands. Eventually, the ball returns to the right hand, which places it back on the table. The person first puts his hands on the table and then withdraws his hands from the table and the person behind the table stands up.", "events": [ { "event": "The video begins with the camera focusing on a table with a red tablecloth and a row of items on the side of the table near the camera. On the far left is a white bottle. To the right of the white bottle sits an orange ball. In the very middle is a small red jar and a white cup with patterns. The red jar has a white substance inside and a spoon stuck in it. On the far right is an empty Coke bottle with a blue straw. Behind the Coke bottle is a black pen. In the upper left corner of the table is a glass filled with water. In the background is a couch with a patterned blanket. Between the sofa and the table stands a man.", "visual_elements": [ { "content": "The video begins with the camera focusing on a table with a red tablecloth.", "type": "camera", "weight": 3 }, { "content": "A row of items are on the side of the table near the camera.", "type": "attribute", "weight": 3 }, { "content": "On the far left is a white bottle.", "type": "attribute", "weight": 3 }, { "content": "To the right of the white bottle sits an orange ball.", "type": "attribute", "weight": 3 }, { "content": "In the very middle is a small red jar and a white cup with patterns.", "type": "attribute", "weight": 3 }, { "content": "The red jar has a white substance inside and a spoon stuck in it.", "type": "attribute", "weight": 2 }, { "content": "On the far right is an empty Coke bottle with a blue straw.", "type": "attribute", "weight": 3 }, { "content": "Behind the Coke bottle is a black pen.", "type": "attribute", "weight": 1 }, { "content": "In the upper left corner of the table is a glass filled with water.", "type": "attribute", "weight": 1 }, { "content": "In the background is a couch with a patterned blanket.", "type": "scene", "weight": 1 }, { "content": "Between the sofa and the table stands a man.", "type": "attribute", "weight": 2 } ] }, { "event": "The person in the middle sits down on the couch. This person wears a black coat and black trousers. The man places his hands palm down on the table. Then, his hands first move above the table and clap twice in succession. Then, he picks up the black pen on the right side of the table with his left hand. The tip of the pen strikes the table several times. 
He puts the pen back in its place with his left hand.", "visual_elements": [ { "content": "The person in the middle sits down on the couch.", "type": "action", "weight": 3 }, { "content": "This person wears a black coat and black trousers.", "type": "attribute", "weight": 2 }, { "content": "The man places his hands palm down on the table.", "type": "attribute", "weight": 3 }, { "content": "The man's hands first move above the table and clap twice in succession.", "type": "action", "weight": 3 }, { "content": "He picks up the black pen on the right side of the table with his left hand.", "type": "action", "weight": 3 }, { "content": "The tip of the pen strikes the table several times.", "type": "action", "weight": 3 }, { "content": "He puts the pen back in its place with his left hand.", "type": "action", "weight": 2 } ] }, { "event": "His right hand picks up the spoon in the jar to scoop up the white substance from the small red jar and pour it into the cup on the right of the jar. The person pours a total of three spoons of the white substance from the jar into the cup. His right hand picks up the orange ball and throws it into the left hand. The orange ball moves back and forth between the right and left hands. Eventually, the ball returns to the right hand, which places it back on the table. The person first puts his hands on the table and then withdraws his hands from the table and the person behind the table stands up.", "visual_elements": [ { "content": "His right hand picks up the spoon in the jar to scoop up the white substance from the small red jar and pour it into the cup on the right of the jar.", "type": "action", "weight": 3 }, { "content": "The person pours a total of three spoons of the white substance from the jar into the cup.", "type": "action", "weight": 3 }, { "content": "His right hand picks up the orange ball and throws it into the left hand.", "type": "action", "weight": 3 }, { "content": "The orange ball moves back and forth between the right and left hands.", "type": "action", "weight": 3 }, { "content": "Eventually, the ball returns to the right hand, which places it back on the table.", "type": "action", "weight": 3 }, { "content": "The person first puts his hands on the table.", "type": "action", "weight": 3 }, { "content": "The man then withdraws his hands from the table and the person behind the table stands up.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 25 }, { "index": "TUNA_0502", "video_path": "PerceptionTest/video_2856.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 19.14, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a wooden table. An apple with a pen stuck in it is in the middle of the table. To the right of the apple is a banknote. To the right of the note is a black wallet. There are two coins on the left of the apple. In the background is a brown backpack hanging from the back of a brown chair. Part of a red chair back is on the left side of the frame. In the background, some green plants on the far right side of a white wall are in the frame.\nA man wearing an orange and blue sports vest then enters the frame from the right. The man picks up the wallet on the table and opens it. The man holds the wallet in his left hand and reaches for the note on the table with his right hand. The man picks up the note and slips it into the wallet. The man then picks up the two coins on the table. 
The man first unzips one pocket of the wallet and then puts the coins in the wallet as well. The man then closes the wallet and turns around to place the wallet in the brown backpack behind him. Finally, the man leaves from the right side of the frame.", "events": [ { "event": "The video begins with the camera focusing on a wooden table. An apple with a pen stuck in it is in the middle of the table. To the right of the apple is a banknote. To the right of the note is a black wallet. There are two coins on the left of the apple. In the background is a brown backpack hanging from the back of a brown chair. Part of a red chair back is on the left side of the frame. In the background, some green plants on the far right side of a white wall are in the frame.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "An apple with a pen stuck in it is in the middle of the table.", "type": "attribute", "weight": 3 }, { "content": "To the right of the apple is a banknote.", "type": "attribute", "weight": 3 }, { "content": "To the right of the note is a black wallet.", "type": "attribute", "weight": 3 }, { "content": "There are two coins on the left of the apple.", "type": "attribute", "weight": 3 }, { "content": "In the background is a brown backpack hanging from the back of a brown chair.", "type": "scene", "weight": 2 }, { "content": "Part of a red chair back is on the left side of the frame.", "type": "scene", "weight": 1 }, { "content": "In the background, some green plants on the far right side of a white wall are in the frame.", "type": "scene", "weight": 1 } ] }, { "event": "A man wearing an orange and blue sports vest then enters the frame from the right. The man picks up the wallet on the table and opens it. The man holds the wallet in his left hand and reaches for the note on the table with his right hand. The man picks up the note and slips it into the wallet. The man then picks up the two coins on the table. The man first unzips one pocket of the wallet and then puts the coins in the wallet as well. The man then closes the wallet and turns around to place the wallet in the brown backpack behind him. 
Finally, the man leaves from the right side of the frame.", "visual_elements": [ { "content": "A man wearing an orange and blue sports vest then enters the frame from the right.", "type": "action", "weight": 3 }, { "content": "The man picks up the wallet on the table and opens it.", "type": "action", "weight": 3 }, { "content": "The man holds the wallet in his left hand and reaches for the note on the table with his right hand.", "type": "action", "weight": 3 }, { "content": "The man picks up the note and slips it into the wallet.", "type": "action", "weight": 3 }, { "content": "The man then picks up the two coins on the table.", "type": "action", "weight": 3 }, { "content": "The man first unzips one pocket of the wallet and then puts the coins in the wallet as well.", "type": "action", "weight": 3 }, { "content": "The man then closes the wallet and turns around to place the wallet in the brown backpack behind him.", "type": "action", "weight": 3 }, { "content": "Finally, the man leaves from the right side of the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0503", "video_path": "PerceptionTest/video_2864.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 23.69, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a wooden table. A pair of scissors with black handles, a pen stuck in a pink base and a yellow coin are on the table. A couch and the floor can be seen in the background.\nTwo hands enter the frame from the lower left corner of the frame. Both arms are straight above the table. The two hands are in fists with palms facing down. The two fists then flip to make the palms face up. The palms of the two hands are open and there is a small square piece of white paper in the palm of the left hand. The two hands then make fists again. The two fists circle outwards alternately. A few seconds later, the fists stop moving. The two arms are crossed and the left arm is positioned over the right arm. The hands remain in the shape of fists. The palms of the hands are open and the small white piece of paper remains in the palm of the left hand. Finally, the two hands lower down and leave the screen from the bottom left corner of the frame.", "events": [ { "event": "The video begins with the camera focusing on a wooden table. A pair of scissors with black handles, a pen stuck in a pink base and a yellow coin are on the table. A couch and the floor can be seen in the background.", "visual_elements": [ { "content": "The video begins with the camera focusing on a wooden table.", "type": "camera", "weight": 3 }, { "content": "A pair of scissors with black handles, a pen stuck in a pink base and a yellow coin are on the table.", "type": "attribute", "weight": 3 }, { "content": "A couch and the floor can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "Two hands enter the frame from the lower left corner of the frame. Both arms are straight above the table. The two hands are in fists with palms facing down. The two fists then flip to make the palms face up. The palms of the two hands are open and there is a small square piece of white paper in the palm of the left hand. The two hands then make fists again. The two fists circle outwards alternately. A few seconds later, the fists stop moving. The two arms are crossed and the left arm is positioned over the right arm. 
The hands remain in the shape of fists. The palms of the hands are open and the small white piece of paper remains in the palm of the left hand. Finally, the two hands lower down and leave the screen from the bottom left corner of the frame.", "visual_elements": [ { "content": "Two hands enter the frame from the lower left corner of the frame.", "type": "action", "weight": 3 }, { "content": "Both arms are straight above the table.", "type": "attribute", "weight": 2 }, { "content": "The two hands are in fists with palms facing down.", "type": "attribute", "weight": 3 }, { "content": "The two fists then flip to make the palms face up.", "type": "action", "weight": 3 }, { "content": "The palms of the two hands are open.", "type": "action", "weight": 3 }, { "content": "A small square piece of white paper is in the palm of the left hand.", "type": "attribute", "weight": 3 }, { "content": "The two hands then make fists again.", "type": "action", "weight": 3 }, { "content": "The two fists circle outwards alternately.", "type": "action", "weight": 3 }, { "content": "A few seconds later, the fists stop moving.", "type": "action", "weight": 2 }, { "content": "The two arms are crossed and the left arm is positioned over the right arm.", "type": "attribute", "weight": 3 }, { "content": "The palms of the hands are open and the small white piece of paper remains in the palm of the left hand.", "type": "action", "weight": 3 }, { "content": "Finally, the two hands lower down and leave the screen from the bottom left corner of the frame.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0504", "video_path": "PerceptionTest/video_2884.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 23.7, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera looking down on a wooden table covered with a dark blue square cloth mat. There are two wooden coasters on the cloth mat. One is located in the upper right corner of the cloth mat and the other is located in the lower left corner of the cloth mat. A bottle of mineral water is placed on the coaster in the upper right corner and a glass is placed on the coaster in the lower left corner. There is a red toy in the upper left corner of the cloth mat.\nTwo hands in a grey sweater enter from the left side of the frame. The hands pick up the bottle of mineral water and unscrew the cap. The left hand then tilts the mineral water bottle and pours the water into the glass. The right hand leaves from the left side of the frame. A few seconds later, the left hand stops pouring and the right hand enters the frame and screws the cap back onto the mineral water bottle. The left hand puts the bottle of mineral water back on the coaster in the upper right corner.\nThe right hand picks up the glass and then moves away from the left side of the frame. The left hand is in a fist at the top left corner of the dark blue cloth mat. The right hand holding the glass appears on the far left of the frame and then leaves again. The left thumb extends to touch the red toy and then retracts. The right hand holding the glass reappears on the far left side of the frame again and then leaves again. Finally, the right hand reenters the frame with the glass and places the glass back on the coaster in the lower left corner. 
The two hands leave from the left side of the frame.", "events": [ { "event": "The video begins with the camera looking down on a wooden table covered with a dark blue square cloth mat. There are two wooden coasters on the cloth mat. One is located in the upper right corner of the cloth mat and the other is located in the lower left corner of the cloth mat. A bottle of mineral water is placed on the coaster in the upper right corner and a glass is placed on the coaster in the lower left corner. There is a red toy in the upper left corner of the cloth mat.", "visual_elements": [ { "content": "The camera looks down on a wooden table.", "type": "camera", "weight": 3 }, { "content": "The table is covered with a dark blue square cloth mat.", "type": "attribute", "weight": 3 }, { "content": "There are two wooden coasters on the cloth mat.", "type": "attribute", "weight": 3 }, { "content": "One is located in the upper right corner of the cloth mat and the other is located in the lower left corner of the cloth mat.", "type": "attribute", "weight": 2 }, { "content": "A bottle of mineral water is placed on the coaster in the upper right corner.", "type": "attribute", "weight": 3 }, { "content": "A glass is placed on the coaster in the lower left corner.", "type": "attribute", "weight": 3 }, { "content": "There is a red toy in the upper left corner of the cloth mat.", "type": "attribute", "weight": 2 } ] }, { "event": "Two hands in a grey sweater enter from the left side of the frame. The hands pick up the bottle of mineral water and unscrew the cap. The left hand then tilts the mineral water bottle and pours the water into the glass. The right hand leaves from the left side of the frame. A few seconds later, the left hand stops pouring and the right hand enters the frame and screws the cap back onto the mineral water bottle. The left hand puts the bottle of mineral water back on the coaster in the upper right corner.", "visual_elements": [ { "content": "Two hands in a grey sweater enter from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The hands pick up the bottle of mineral water and unscrew the cap.", "type": "action", "weight": 3 }, { "content": "The left hand then tilts the mineral water bottle and pours the water into the glass.", "type": "action", "weight": 3 }, { "content": "The right hand leaves from the left side of the frame.", "type": "action", "weight": 2 }, { "content": "A few seconds later, the left hand stops pouring.", "type": "action", "weight": 2 }, { "content": "The right hand enters the frame and screws the cap back onto the mineral water bottle.", "type": "action", "weight": 3 }, { "content": "The left hand puts the bottle of mineral water back on the coaster in the upper right corner.", "type": "action", "weight": 3 } ] }, { "event": "The right hand picks up the glass and then moves away from the left side of the frame. The left hand is in a fist at the top left corner of the dark blue cloth mat. The right hand holding the glass appears on the far left of the frame and then leaves again. The left thumb extends to touch the red toy and then retracts. The right hand holding the glass reappears on the far left side of the frame again and then leaves again. Finally, the right hand reenters the frame with the glass and places the glass back on the coaster in the lower left corner. 
The two hands leave from the left side of the frame.", "visual_elements": [ { "content": "The right hand picks up the glass and then moves away from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The left hand is in a fist at the top left corner of the dark blue cloth mat.", "type": "attribute", "weight": 3 }, { "content": "The right hand holding the glass appears on the far left of the frame and then leaves again.", "type": "action", "weight": 3 }, { "content": "The left thumb extends to touch the red toy and then retracts.", "type": "action", "weight": 2 }, { "content": "The right hand holding the glass reappears on the far left side of the frame again and then leaves again.", "type": "action", "weight": 3 }, { "content": "The right hand reenters the frame with the glass and places the glass back on the coaster in the lower left corner.", "type": "action", "weight": 3 }, { "content": "The two hands leave from the left side of the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0505", "video_path": "PerceptionTest/video_289.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 31.35, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a wooden table with a transparent plastic tray resting on top of it. A blank piece of paper and a black pen are on the tray. In the background is a light-coloured wall against which the table is placed.\nAn adult male right hand appears on the right side of the frame. The male right hand picks up a black pen. As his left hand enters from the right side of the frame to fix the paper, the right hand begins to write on the white paper with the pen. Since the cap of the pen is not open, the man makes a writing motion on the paper but does not write anything down. The man then puts the pen down in his right hand and picks up the blank paper. The two hands bring the paper to the camera to show it. The right hand then places the paper back into the plastic tray. The male hands leave from the right side of the frame.\nA hand pushes a book with a blue cover into the screen from the right side of the frame. A black plastic fork sits on top of the book. Behind the fork is a black wallet with a silver spoon resting on it. The hand pushes the book to the right of the plastic tray and leaves the frame.\nA right hand holding a bottle of mineral water with a blue label enters the frame from the right side of the screen. The hand places the bottle on the left side of the plastic tray. The left hand then passes the right hand a black conical object. The right hand places the conical object on the blank paper. The left hand then passes the right hand a black cylindrical bottle cap. The right hand adjusts the cap's position in the hand before placing it on the blank paper. The two hands leave from the right side of the frame.", "events": [ { "event": "The video begins with the camera focusing on a wooden table with a transparent plastic tray resting on top of it. A blank piece of paper and a black pen are on the tray. 
In the background is a light-coloured wall against which the table is placed.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "A transparent plastic tray rests on top of the wooden table.", "type": "attribute", "weight": 3 }, { "content": "A blank piece of paper and a black pen are on the tray.", "type": "attribute", "weight": 3 }, { "content": "In the background is a light-coloured wall against which the table is placed.", "type": "scene", "weight": 2 } ] }, { "event": "An adult male right hand appears on the right side of the frame. The male right hand picks up a black pen. As his left hand enters from the right side of the frame to fix the paper, the right hand begins to write on the white paper with the pen. Since the cap of the pen is not open, the man makes a writing motion on the paper but does not write anything down. The man then puts the pen down in his right hand and picks up the blank paper. The two hands bring the paper to the camera to show it. The right hand then places the paper back into the plastic tray. The male hands leave from the right side of the frame.", "visual_elements": [ { "content": "An adult male right hand appears on the right side of the frame.", "type": "action", "weight": 3 }, { "content": "The male right hand picks up a black pen.", "type": "action", "weight": 3 }, { "content": "As his left hand enters from the right side of the frame to fix the paper, the right hand begins to write on the white paper with the pen.", "type": "action", "weight": 3 }, { "content": "Since the cap of the pen is not open, the man makes a writing motion on the paper but does not write anything down.", "type": "attribute", "weight": 3 }, { "content": "The man then puts the pen down in his right hand and picks up the blank paper.", "type": "action", "weight": 3 }, { "content": "The two hands bring the paper to the camera to show it.", "type": "action", "weight": 3 }, { "content": "The right hand places the paper back into the plastic tray.", "type": "action", "weight": 2 }, { "content": "The male hands leave from the right side of the frame.", "type": "action", "weight": 2 } ] }, { "event": "A hand pushes a book with a blue cover into the screen from the right side of the frame. A black plastic fork sits on top of the book. Behind the fork is a black wallet with a silver spoon resting on it. The hand pushes the book to the right of the plastic tray and leaves the frame.", "visual_elements": [ { "content": "A hand pushes a book with a blue cover into the screen from the right side of the frame.", "type": "action", "weight": 3 }, { "content": "A black plastic fork sits on top of the book.", "type": "attribute", "weight": 2 }, { "content": "Behind the fork is a black wallet.", "type": "attribute", "weight": 2 }, { "content": "A silver spoon rests on the wallet.", "type": "attribute", "weight": 2 }, { "content": "The hand pushes the book to the right of the plastic tray and leaves the frame.", "type": "action", "weight": 3 } ] }, { "event": "A right hand holding a bottle of mineral water with a blue label enters the frame from the right side of the screen. The hand places the bottle on the left side of the plastic tray. The left hand then passes the right hand a black conical object. The right hand places the conical object on the blank paper. The left hand then passes the right hand a black cylindrical bottle cap. The right hand adjusts the cap's position in the hand before placing it on the blank paper. 
The two hands leave from the right side of the frame.", "visual_elements": [ { "content": "A right hand holding a bottle of mineral water with a blue label enters the frame from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The hand places the bottle on the left side of the plastic tray.", "type": "action", "weight": 3 }, { "content": "The left hand then passes the right hand a black conical object.", "type": "action", "weight": 2 }, { "content": "The right hand places the conical object on the blank paper.", "type": "action", "weight": 3 }, { "content": "The left hand then passes the right hand a black cylindrical bottle cap.", "type": "action", "weight": 3 }, { "content": "The right hand adjusts the cap's position in the hand before placing it on the blank paper.", "type": "action", "weight": 3 }, { "content": "The two hands leave from the right side of the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 24 }, { "index": "TUNA_0506", "video_path": "PerceptionTest/video_2908.mp4", "video_source": "PerceptionTest", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Daily Life", "duration": 11.71, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a dark grey marble surface. A number of items are placed on the marble. The rightmost item is a pair of brown shoes with pink laces. A green pen is placed horizontally across the shoes. A water bottle with a blue cap is placed vertically in the left shoe. A book with a yellow cover is on the left side of the shoe. A white jar with a blue label sits on the book. A blue plastic bag is on the far left. A black LG remote control presses down on the plastic bag.\nThe camera shakes slightly and then turns to the left, making the items on the marble surface leave the frame. In the centre of the frame is a white tiled floor. The bottom right of the frame has dark grey marble table corners. The camera then turns to the right and the items on the marble return to the centre of the frame.", "events": [ { "event": "The video begins with the camera focusing on a dark grey marble surface. A number of items are placed on the marble. The rightmost item is a pair of brown shoes with pink laces. A green pen is placed horizontally across the shoes. A water bottle with a blue cap is placed vertically in the left shoe. A book with a yellow cover is on the left side of the shoe. A white jar with a blue label sits on the book. A blue plastic bag is on the far left. 
A black LG remote control presses down on the plastic bag.", "visual_elements": [ { "content": "The camera focuses on a dark grey marble surface.", "type": "camera", "weight": 3 }, { "content": "A number of items are placed on the marble.", "type": "attribute", "weight": 2 }, { "content": "The rightmost item is a pair of brown shoes with pink laces.", "type": "attribute", "weight": 3 }, { "content": "A green pen is placed horizontally across the shoes.", "type": "attribute", "weight": 2 }, { "content": "A water bottle with a blue cap is placed vertically in the left shoe.", "type": "attribute", "weight": 3 }, { "content": "A book with a yellow cover is on the left side of the shoe.", "type": "attribute", "weight": 3 }, { "content": "A white jar with a blue label sits on the book.", "type": "attribute", "weight": 3 }, { "content": "A blue plastic bag is on the far left.", "type": "attribute", "weight": 3 }, { "content": "A black LG remote control presses down on the plastic bag.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera shakes slightly and then turns to the left, making the items on the marble surface leave the frame. In the centre of the frame is a white tiled floor. The bottom right of the frame has dark grey marble table corners. The camera then turns to the right and the items on the marble return to the centre of the frame.", "visual_elements": [ { "content": "The camera shakes slightly and then turns to the left.", "type": "camera", "weight": 3 }, { "content": "The items on the marble surface leave the frame.", "type": "attribute", "weight": 2 }, { "content": "In the centre of the frame is a white tiled floor.", "type": "attribute", "weight": 3 }, { "content": "The bottom right of the frame has dark grey marble table corners.", "type": "attribute", "weight": 2 }, { "content": "The camera turns to the right.", "type": "camera", "weight": 3 }, { "content": "The items on the marble return to the centre of the frame.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0507", "video_path": "PerceptionTest/video_2947.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 17.17, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a wooden table covered with a transparent tablecloth. In the background is a light green wall with some black peeling marks at the bottom. The floor is tiled in white.\nTwo hands enter from the bottom right of the frame. The hands have long thumbnails. The two hands make a clapping motion above the tabletop. After several consecutive high-fives, the hands leave from the lower right of the frame.\nThe right hand enters from the bottom right of the screen with a metal spoon. The hand taps the table with the side of the spoon. After several consecutive hits, the hand leaves from the bottom right of the frame with the spoon.", "events": [ { "event": "The video begins with the camera focusing on a wooden table covered with a transparent tablecloth. In the background is a light green wall with some black peeling marks at the bottom. 
The floor is tiled in white.", "visual_elements": [ { "content": "The camera focuses on a wooden table covered with a transparent tablecloth.", "type": "camera", "weight": 3 }, { "content": "The table is covered with a transparent tablecloth.", "type": "attribute", "weight": 1 }, { "content": "In the background is a light green wall.", "type": "scene", "weight": 2 }, { "content": "Some black peeling marks are at the bottom of the wall.", "type": "scene", "weight": 1 }, { "content": "The floor is tiled in white.", "type": "scene", "weight": 2 } ] }, { "event": "Two hands enter from the bottom right of the frame. The hands have long thumbnails. The two hands make a clapping motion above the tabletop. After several consecutive high-fives, the hands leave from the lower right of the frame.", "visual_elements": [ { "content": "Two hands enter from the bottom right of the frame.", "type": "action", "weight": 3 }, { "content": "The hands have long thumbnails.", "type": "attribute", "weight": 1 }, { "content": "The two hands make a clapping motion above the tabletop.", "type": "action", "weight": 3 }, { "content": "After several consecutive high-fives, the hands leave from the lower right of the frame.", "type": "action", "weight": 3 } ] }, { "event": "The right hand enters from the bottom right of the screen with a metal spoon. The hand taps the table with the side of the spoon. After several consecutive hits, the hand leaves from the bottom right of the frame with the spoon.", "visual_elements": [ { "content": "The right hand enters from the bottom right of the screen with a metal spoon.", "type": "action", "weight": 3 }, { "content": "The hand taps the table with the side of the spoon.", "type": "action", "weight": 3 }, { "content": "After several consecutive hits, the hand leaves from the bottom right of the frame with the spoon.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0508", "video_path": "PerceptionTest/video_3015.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 21.83, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a table. Two red glass bottles and a transparent glass are on the table. In the background is a landscape painting hanging on the wall, which depicts a cabin and some trees. There are also a few white candles and a small globe on the left side of the table. The light is soft and comes from a lamp in the centre of the background.\nA hand in a red long sleeve with red fingernail polish extends from the right side of the frame. As the right hand touches the red glass bottle on the right side of the frame, the left hand also enters from the right side of the frame. The right hand picks up the red glass bottle and the left hand removes the cap. The right hand tilts the red glass bottle and begins to pour a clear liquid into the glass in front of it. The liquid flows into the glass and gradually fills the glass.\nAfter pouring the liquid, the left hand first puts the cap back into the red glass bottle and then the right hand places the bottle back on the table. The right hand reaches for the glass filled with liquid. The two hands lift the glass to the far right of the frame. The glass is tilted downwards and then straightened. The hands return the glass to its original position. 
The two hands leave from the right side of the frame.", "events": [ { "event": "The video begins with the camera focusing on a table. Two red glass bottles and a transparent glass are on the table. In the background is a landscape painting hanging on the wall, which depicts a cabin and some trees. There are also a few white candles and a small globe on the left side of the table. The light is soft and comes from a lamp in the centre of the background.", "visual_elements": [ { "content": "The video begins with the camera focusing on a table.", "type": "camera", "weight": 3 }, { "content": "Two red glass bottles and a transparent glass are on the table.", "type": "attribute", "weight": 3 }, { "content": "In the background is a landscape painting hanging on the wall.", "type": "scene", "weight": 2 }, { "content": "The painting depicts a cabin and some trees.", "type": "scene", "weight": 1 }, { "content": "There are also a few white candles and a small globe on the left side of the table.", "type": "scene", "weight": 2 }, { "content": "The light is soft and comes from a lamp in the centre of the background.", "type": "scene", "weight": 2 } ] }, { "event": "A hand in a red long sleeve with red fingernail polish extends from the right side of the frame. As the right hand touches the red glass bottle on the right side of the frame, the left hand also enters from the right side of the frame. The right hand picks up the red glass bottle and the left hand removes the cap. The right hand tilts the red glass bottle and begins to pour a clear liquid into the glass in front of it. The liquid flows into the glass and gradually fills the glass.", "visual_elements": [ { "content": "A hand in a red long sleeve extends from the right side of the frame.", "type": "action", "weight": 3 }, { "content": "The hand wears red fingernail polish.", "type": "attribute", "weight": 2 }, { "content": "As the right hand touches the red glass bottle on the right side of the frame, the left hand also enters from the right side of the frame.", "type": "action", "weight": 3 }, { "content": "The right hand picks up the red glass bottle and the left hand removes the cap.", "type": "action", "weight": 3 }, { "content": "The right hand tilts the red glass bottle and begins to pour a clear liquid into the glass in front of it.", "type": "action", "weight": 3 }, { "content": "The liquid flows into the glass and gradually fills the glass.", "type": "attribute", "weight": 3 } ] }, { "event": "After pouring the liquid, the left hand first puts the cap back into the red glass bottle and then the right hand places the bottle back on the table. The right hand reaches for the glass filled with liquid. The two hands lift the glass to the far right of the frame. The glass is tilted downwards and then straightened. The hands return the glass to its original position. 
The two hands leave from the right side of the frame.", "visual_elements": [ { "content": "After pouring the liquid, the left hand first puts the cap back into the red glass bottle.", "type": "action", "weight": 3 }, { "content": "The right hand then places the bottle back on the table.", "type": "action", "weight": 3 }, { "content": "The right hand reaches for the glass filled with liquid.", "type": "action", "weight": 2 }, { "content": "The two hands lift the glass to the far right of the frame.", "type": "action", "weight": 3 }, { "content": "The glass is tilted downwards and then straightened.", "type": "action", "weight": 2 }, { "content": "The hands return the glass to its original position.", "type": "action", "weight": 3 }, { "content": "The two hands leave from the right side of the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0509", "video_path": "PerceptionTest/video_3038.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 31.35, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on an indoor scene with a wooden coffee table in the centre of the frame. A single piece of glass is placed in the middle of the table. A woven tablecloth is placed on the glass near the camera. A potted plant is placed on the glass at the rear. Two wooden armchairs can be seen in the background. Yellow curtains hang on either side of the window, and in the centre of the window is a shutter.\nTwo hands enter from the left side of the frame. The hands clap continuously above the coffee table. The two hands leave from the left side of the frame. A hand holding a spoon-like object enters from the left side of the frame and leaves from the left side of the frame after hitting the woven tablecloth with the object several times. The two hands holding a piece of white paper enter from the left side of the frame. The hands keep folding the white paper. After folding it several times, the hands stop moving and leave from the left side of the screen with the folded paper.", "events": [ { "event": "The video begins with the camera focusing on an indoor scene with a wooden coffee table in the centre of the frame. A single piece of glass is placed in the middle of the table. A woven tablecloth is placed on the glass near the camera. A potted plant is placed on the glass at the rear. Two wooden armchairs can be seen in the background. Yellow curtains hang on either side of the window, and in the centre of the window is a shutter.", "visual_elements": [ { "content": "The camera focuses on an indoor scene.", "type": "camera", "weight": 3 }, { "content": "A wooden coffee table is in the centre of the frame.", "type": "attribute", "weight": 3 }, { "content": "A single piece of glass is placed in the middle of the table.", "type": "attribute", "weight": 2 }, { "content": "A woven tablecloth is placed on the glass near the camera.", "type": "attribute", "weight": 2 }, { "content": "A potted plant is placed on the glass at the rear.", "type": "scene", "weight": 2 }, { "content": "Two wooden armchairs can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "Yellow curtains hang on either side of the window.", "type": "scene", "weight": 2 }, { "content": "In the centre of the window is a shutter.", "type": "scene", "weight": 1 } ] }, { "event": "Two hands enter from the left side of the frame. 
The hands clap continuously above the coffee table. The two hands leave from the left side of the frame. A hand holding a spoon-like object enters from the left side of the frame and leaves from the left side of the frame after hitting the woven tablecloth with the object several times. The two hands holding a piece of white paper enter from the left side of the frame. The hands keep folding the white paper. After folding it several times, the hands stop moving and leave from the left side of the screen with the folded paper.", "visual_elements": [ { "content": "Two hands enter from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The hands clap continuously above the coffee table.", "type": "action", "weight": 3 }, { "content": "The two hands leave from the left side of the frame.", "type": "action", "weight": 2 }, { "content": "A hand holding a spoon-like object enters from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The hand leaves from the left side of the frame after hitting the woven tablecloth with the object several times.", "type": "action", "weight": 3 }, { "content": "The two hands holding a piece of white paper enter from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The hands keep folding the white paper.", "type": "action", "weight": 3 }, { "content": "After folding it several times, the hands stop moving and leave from the left side of the screen with the folded paper.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0510", "video_path": "PerceptionTest/video_3106.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 17.2, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a wooden table on which a fork and a knife are placed. The knife and the fork are placed parallel to each other, with the knife on the right and the fork on the left. Light shines in from the right side of the frame.\nThen, a hand holding a pink plastic letter \"R\" enters from the right side of the frame and places it on the empty space to the right of the knife and fork. The hand puts down the letter and quickly leaves the frame.\nThen, the hand holding a yellow plastic square with the number \"4\" on it enters from the right side of the screen and leaves the screen after placing the yellow square on top of the pink letter \"R\".\nNext, the hand enters from the right side of the screen with a blue plastic letter \"S\", and leaves the screen after placing the blue letter \"S\" on top of the yellow square \"4\".\nThen, the hand enters from the right side of the screen holding an orange round plastic block with the number \"8\" on it, and leaves the screen after placing the orange round block on top of the blue letter \"S\".\nThe hand then enters from the right side of the screen holding another pink plastic letter \"T\" and leaves the screen after placing the \"T\" on top of the orange round block \"8\".\nFinally, the hand holding a green triangular plastic block with the number \"3\" on it enters from the right side of the screen, and leaves the screen after placing the green triangular block on top of the pink letter \"T\".", "events": [ { "event": "The video begins with the camera focusing on a wooden table on which a fork and a knife are placed. The knife and the fork are placed parallel to each other, with the knife on the right and the fork on the left. 
Light shines in from the right side of the frame.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "A fork and a knife are placed on the table.", "type": "attribute", "weight": 3 }, { "content": "The knife and the fork are placed parallel to each other, with the knife on the right and the fork on the left.", "type": "attribute", "weight": 2 }, { "content": "Light shines in from the right side of the frame.", "type": "scene", "weight": 2 } ] }, { "event": "Then, a hand holding a pink plastic letter \"R\" enters from the right side of the frame and places it on the empty space to the right of the knife and fork. The hand puts down the letter and quickly leaves the frame.", "visual_elements": [ { "content": "A hand holding a pink plastic letter \"R\" enters from the right side of the frame.", "type": "action", "weight": 3 }, { "content": "The hand places the letter \"R\" on the empty space to the right of the knife and fork.", "type": "action", "weight": 3 }, { "content": "The hand puts down the letter and quickly leaves the frame.", "type": "action", "weight": 2 } ] }, { "event": "Then, the hand holding a yellow plastic square with the number \"4\" on it enters from the right side of the screen and leaves the screen after placing the yellow square on top of the pink letter \"R\".", "visual_elements": [ { "content": "The hand holding a yellow plastic square with the number \"4\" on it enters from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The hand leaves the screen after placing the yellow square on top of the pink letter \"R\".", "type": "action", "weight": 3 } ] }, { "event": "Next, the hand enters from the right side of the screen with a blue plastic letter \"S\", and leaves the screen after placing the blue letter \"S\" on top of the yellow square \"4\".", "visual_elements": [ { "content": "The hand enters from the right side of the screen with a blue plastic letter \"S\".", "type": "action", "weight": 3 }, { "content": "The hand leaves the screen after placing the blue letter \"S\" on top of the yellow square \"4\".", "type": "action", "weight": 3 } ] }, { "event": "Then, the hand enters from the right side of the screen holding an orange round plastic block with the number \"8\" on it, and leaves the screen after placing the orange round block on top of the blue letter \"S\".", "visual_elements": [ { "content": "The hand enters from the right side of the screen holding an orange round plastic block with the number \"8\" on it.", "type": "action", "weight": 3 }, { "content": "The hand leaves the screen after placing the orange round block on top of the blue letter \"S\".", "type": "action", "weight": 3 } ] }, { "event": "The hand then enters from the right side of the screen holding another pink plastic letter \"T\" and leaves the screen after placing the \"T\" on top of the orange round block \"8\".", "visual_elements": [ { "content": "The hand enters from the right side of the screen holding another pink plastic letter \"T\".", "type": "action", "weight": 3 }, { "content": "The hand leaves the screen after placing the \"T\" on top of the orange round block \"8\".", "type": "action", "weight": 3 } ] }, { "event": "Finally, the hand holding a green triangular plastic block with the number \"3\" on it enters from the right side of the screen, and leaves the screen after placing the green triangular block on top of the pink letter \"T\".", "visual_elements": [ { "content": "The hand 
holding a green triangular plastic block with the number \"3\" on it enters from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The hand leaves the screen after placing green triangular block on top of the pink letter \"T\".", "type": "action", "weight": 3 } ] } ], "n_events": 7, "n_elements": 17 }, { "index": "TUNA_0511", "video_path": "PerceptionTest/video_3126.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 13.0, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a wooden coffee table that is placed on a yellow carpet. On the far left of the coffee table is a black wallet. To the right of the wallet is a large bottle of mineral water. To the right of the mineral water is an empty black cup. On the far right of the coffee table is a black and white disc-shaped ornament. A beige sofa can be seen in the background. A man wearing a light blue T-shirt and dark blue shorts sits on the couch with his hands reaching for the mineral water on the coffee table.\nThe man holds the mineral water bottle in his left hand and unscrews the cap with his right hand. His right hand places the cap on the coffee table. His two hands pick up the mineral water bottle and start pouring water into the empty cup nearby. After pouring the water, the person puts the bottle back on the coffee table. He rests his right hand on his right leg, and picks up the filled cup with his left hand and leaves from the top of the frame.", "events": [ { "event": "The video begins with the camera focusing on a wooden coffee table that is placed on a yellow carpet. On the far left of the coffee table is a black wallet. To the right of the wallet is a large bottle of mineral water. To the right of the mineral water is an empty black cup. On the far right of the coffee table is a black and white disc-shaped ornament. A beige sofa can be seen in the background. A man wearing a light blue T-shirt and dark blue shorts sits on the couch with his hands reaching for the mineral water on the coffee table.", "visual_elements": [ { "content": "The camera focuses on a wooden coffee table.", "type": "camera", "weight": 3 }, { "content": "The coffee table is placed on a yellow carpet.", "type": "attribute", "weight": 2 }, { "content": "On the far left of the coffee table is a black wallet.", "type": "attribute", "weight": 2 }, { "content": "To the right of the wallet is a large bottle of mineral water.", "type": "attribute", "weight": 3 }, { "content": "To the right of the mineral water is an empty black cup.", "type": "attribute", "weight": 3 }, { "content": "On the far right of the coffee table is a black and white disc-shaped ornament.", "type": "attribute", "weight": 2 }, { "content": "A beige sofa can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "A man wearing a light blue T-shirt and dark blue shorts sits on the sofa.", "type": "attribute", "weight": 3 }, { "content": "The man reaches his hands for the mineral water on the coffee table.", "type": "action", "weight": 2 } ] }, { "event": "The man holds the mineral water bottle in his left hand and unscrews the cap with his right hand. His right hand places the cap on the coffee table. His two hands pick up the mineral water bottle and start pouring water into the empty cup nearby. After pouring the water, the person puts the bottle back on the coffee table. 
He rests his right hand on his right leg, and picks up the filled cup with his left hand and leaves from the top of the frame.", "visual_elements": [ { "content": "The man holds the mineral water bottle in his left hand and unscrews the cap with his right hand.", "type": "action", "weight": 3 }, { "content": "His right hand places the cap on the coffee table.", "type": "action", "weight": 2 }, { "content": "His two hands pick up the mineral water bottle and start pouring water into the empty cup nearby.", "type": "action", "weight": 3 }, { "content": "After pouring the water, the person puts the bottle back on the coffee table.", "type": "action", "weight": 3 }, { "content": "He rests his right hand on his right leg, and picks up the filled cup with his left hand and leaves from the top of the frame.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0512", "video_path": "PerceptionTest/video_3134.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 23.4, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on an interior scene. A wall and a wooden bookshelf with some decorative items are in the background. In the foreground is a black dining table with a pair of sunglasses and a wallet on the end of the table near the camera. Black chairs surround the table.\nA man wearing a plaid shirt and jeans then enters from the left side of the frame. The man walks over to the dining room table and reaches over to pick up the sunglasses on the table. He opens the sunglasses and then closes them before putting the sunglasses back in place. The man repeats the action of picking up the sunglasses and then putting them down again. The man then reaches for the wallet. He opens it and then closes it before putting it back in place. The man repeats the above action two more times. The man walks backwards and leaves from the left side of the frame.", "events": [ { "event": "The video begins with the camera focusing on an interior scene. A wall and a wooden bookshelf with some decorative items are in the background. In the foreground is a black dining table with a pair of sunglasses and a wallet on the end of the table near the camera. Black chairs surround the table.", "visual_elements": [ { "content": "The camera focuses on an interior scene.", "type": "camera", "weight": 3 }, { "content": "A wall and a wooden bookshelf are in the background.", "type": "scene", "weight": 2 }, { "content": "Some decorative items are on the bookshelf.", "type": "scene", "weight": 1 }, { "content": "In the foreground is a black dining table.", "type": "attribute", "weight": 3 }, { "content": "A pair of sunglasses and a wallet are on the end of the table near the camera.", "type": "attribute", "weight": 3 }, { "content": "Black chairs surround the table.", "type": "attribute", "weight": 2 } ] }, { "event": "A man wearing a plaid shirt and jeans then enters from the left side of the frame. The man walks over to the dining room table and reaches over to pick up the sunglasses on the table. He opens the sunglasses and then closes them before putting the sunglasses back in place. The man repeats the action of picking up the sunglasses and then putting them down again. The man then reaches for the wallet. He opens it and then closes it before putting it back in place. The man repeats the above action two more times. 
The man walks backwards and leaves from the left side of the frame.", "visual_elements": [ { "content": "A man wearing a plaid shirt and jeans then enters from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The man walks over to the dining room table and reaches over to pick up the sunglasses on the table.", "type": "action", "weight": 3 }, { "content": "The man opens the sunglasses and then closes them before putting the sunglasses back in place.", "type": "action", "weight": 3 }, { "content": "The man repeats the action of picking up the sunglasses and then putting them down again.", "type": "action", "weight": 3 }, { "content": "The man reaches for the wallet.", "type": "action", "weight": 3 }, { "content": "The man opens the wallet and then closes it before putting it back in place.", "type": "action", "weight": 3 }, { "content": "The man repeats the above action two more times.", "type": "action", "weight": 3 }, { "content": "The man walks backwards and leaves from the left side of the frame.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0513", "video_path": "PerceptionTest/video_3138.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 21.24, "resolution": { "width": 1920, "height": 896 }, "caption": "The video begins with the camera focusing on a table covered with a white checkered tablecloth. A book is placed on the left side of the table. A woman wearing a pink T-shirt with a cartoon pattern and pink plaid shorts stands behind the table. The woman rests her hands flat on the table. There is a black floor fan in the background.\nThe woman's left hand extends out of the right side of the frame and then enters the frame holding a pink plastic cup. Her left hand transfers the cup to the right hand. The cup is placed in the palm of the right hand and raised to the camera for display. The two hands holding the cup exit the right side of the frame. The woman enters the frame from the right side of the screen, holding a blue plastic plate. The woman raises the plate to the camera for display and then leaves from the right side of the frame as she holds the plate in her left hand. The woman enters the frame from the right side of the screen as she bends down and picks up a tangle of black cables from the back of the table. The woman raises the cables to the camera to show them, and then leaves from the right side of the frame with the cables in her left hand. The woman puts her hands again flat on the table. Finally, the woman extends her right hand to the left side of the frame.", "events": [ { "event": "The video begins with the camera focusing on a table covered with a white checkered tablecloth. A book is placed on the left side of the table. A woman wearing a pink T-shirt with a cartoon pattern and pink plaid shorts stands behind the table. The woman rests her hands flat on the table. 
There is a black floor fan in the background.", "visual_elements": [ { "content": "The camera focuses on a table covered with a white checkered tablecloth.", "type": "camera", "weight": 3 }, { "content": "A book is placed on the left side of the table.", "type": "attribute", "weight": 2 }, { "content": "A woman wearing a pink T-shirt with a cartoon pattern and pink plaid shorts stands behind the table.", "type": "attribute", "weight": 3 }, { "content": "The woman rests her hands flat on the table.", "type": "attribute", "weight": 2 }, { "content": "There is a black floor fan in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The woman's left hand extends out of the right side of the frame and then enters the frame holding a pink plastic cup. Her left hand transfers the cup to the right hand. The cup is placed in the palm of the right hand and raised to the camera for display. The two hands holding the cup exit the right side of the frame. The woman enters the frame from the right side of the screen, holding a blue plastic plate. The woman raises the plate to the camera for display and then leaves from the right side of the frame as she holds the plate in her left hand. The woman enters the frame from the right side of the screen as she bends down and picks up a tangle of black cables from the back of the table. The woman raises the cables to the camera to show them, and then leaves from the right side of the frame with the cables in her left hand. The woman puts her hands again flat on the table. Finally, the woman extends her right hand to the left side of the frame.", "visual_elements": [ { "content": "The woman's left hand extends out of the right side of the frame and then enters the frame holding a pink plastic cup.", "type": "action", "weight": 3 }, { "content": "Her left hand transfers the cup to the right hand. 
The", "type": "action", "weight": 2 }, { "content": "The cup is placed in the palm of the right hand and raised to the camera for display.", "type": "action", "weight": 3 }, { "content": "The two hands holding the cup exits the right side of the frame.", "type": "action", "weight": 2 }, { "content": "The woman enters the frame from the right side of the screen, holding a blue plastic plate.", "type": "action", "weight": 3 }, { "content": "The woman raises the plate to the camera for display.", "type": "action", "weight": 3 }, { "content": "The woman leaves from the right side of the frame as she holds the plate in her left hand.", "type": "action", "weight": 2 }, { "content": "The woman enters the frame from the right side of the screen as she bends down and picks up a tangle of black cables from the back of the table.", "type": "action", "weight": 3 }, { "content": "The woman raises the cables to the camera to show it.", "type": "action", "weight": 3 }, { "content": "The woman leaves from the right side of the frame with the cable in her left hand.", "type": "action", "weight": 3 }, { "content": "The woman puts her hands again flat on the table.", "type": "action", "weight": 2 }, { "content": "Finally, the woman extends her right hand to the left side of the frame.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0514", "video_path": "PerceptionTest/video_3207.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 24.58, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a black table with a piece of white paper on it. A pink pen presses on the white paper. Behind the paper and pen is a pink cup. In the background is a decorative cloth with red and blue patterns.\nThen a hand wearing a dark coloured long-sleeved shirt enters the frame from the top right of the screen. The hand picks up a pink pen and begins to write on the white paper. After writing, the hand puts the pen back in its place. The hand picks up the white paper and moves closer to the camera to show the writing on the white paper. The letters \"PIG\" are written on the paper. The hand then places the paper back on the table and leaves the frame.\nThe hand enters from the top of the screen holding the three white letter cards in turn. The hand places the cards on the table and leaves from the top of the screen. The letter \"G\" is placed first, followed by the letter \"P\" and finally the letter \"I\". The cards are placed in the centre of the table in the order of \"GPI\".", "events": [ { "event": "The video begins with the camera focusing on a black table with a piece of white paper on it. A pink pen presses on the white paper. Behind the paper and pen is a pink cup. In the background is a decorative cloth with red and blue patterns.", "visual_elements": [ { "content": "The camera focuses on a black table.", "type": "camera", "weight": 3 }, { "content": "A piece of white paper is on the table.", "type": "attribute", "weight": 3 }, { "content": "A pink pen presses on the white paper.", "type": "attribute", "weight": 3 }, { "content": "Behind the paper and pen is a pink cup.", "type": "attribute", "weight": 2 }, { "content": "In the background is a decorative cloth with red and blue patterns.", "type": "scene", "weight": 2 } ] }, { "event": "Then a hand wearing a dark coloured long-sleeved shirt enters the frame from the top right of the screen. 
The hand picks up a pink pen and begins to write on the white paper. After writing, the hand puts the pen back in its place. The hand picks up the white paper and moves closer to the camera to show the writing on the white paper. The letters \"PIG\" are written on the paper. The hand then places the paper back on the table and leaves the frame.", "visual_elements": [ { "content": "A hand wearing a dark coloured long-sleeved shirt enters the frame from the top right of the screen.", "type": "action", "weight": 3 }, { "content": "The hand picks up a pink pen and begins to write on the white paper.", "type": "action", "weight": 3 }, { "content": "After writing, the hand puts the pen back in its place.", "type": "action", "weight": 3 }, { "content": "The hand picks up the white paper and moves closer to the camera to show the writing on the white paper.", "type": "action", "weight": 3 }, { "content": "The letters \"PIG\" are written on the paper.", "type": "attribute", "weight": 3 }, { "content": "The hand places the paper back on the table and leaves the frame.", "type": "action", "weight": 3 } ] }, { "event": "The hand enters from the top of the screen holding the three white letter cards in turn. The hand places the cards on the table and leaves from the top of the screen. The letter \"G\" is placed first, followed by the letter \"P\" and finally the letter \"I\". The cards are placed in the centre of the table in the order of \"GPI\".", "visual_elements": [ { "content": "The hand enters from the top of the screen holding the three white letter cards in turn.", "type": "action", "weight": 3 }, { "content": "The hand places the cards on the table and leaves from the top of the screen.", "type": "action", "weight": 3 }, { "content": "The letter \"G\" is placed first, followed by the letter \"P\" and finally the letter \"I\".", "type": "action", "weight": 3 }, { "content": "The cards are placed in the centre of the table in the order of \"GPI\".", "type": "attribute", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0515", "video_path": "PerceptionTest/video_321.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 20.27, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a brown table. A book, a knife, an apple, a small green banana, a glass and a ceramic mug are on the part of the table away from the camera. In the background is a light-coloured floor. To the right of the table is a couch with green and grey cushions. A person in dark brown trousers sits with his legs crossed on the couch next to the table.\nThe right hand of the person sitting on the couch enters the frame, holding a yellow bottle. The right hand turns the yellow bottle to show both the front and back of the bottle to the camera. The right hand then leaves from the right side of the frame holding the yellow bottle.\nThe right hand enters from the right side of the frame holding another transparent bottle with a brown cap. The person turns the bottle back and forth with his right hand to show different parts of the bottle to the camera. The person leaves from the right side of the frame with the transparent bottle.\nNext, the right hand enters from the right side of the frame holding a coloured pen. 
The right hand turns the pen in front of the camera and then leaves from the right side of the frame holding the pen.\nAfterwards, the right hand enters from the right side of the screen, holding a white radish with green leaves. The right hand turns the white radish in front of the camera and then leaves from the right side of the frame with the radish.", "events": [ { "event": "The video begins with the camera focusing on a brown table. A book, a knife, an apple, a small green banana, a glass and a ceramic mug are on the part of the table away from the camera. In the background is a light-coloured floor. To the right of the table is a couch with green and grey cushions. A person in dark brown trousers sits with his legs crossed on the couch next to the table.", "visual_elements": [ { "content": "The video begins with the camera focusing on a brown table.", "type": "camera", "weight": 3 }, { "content": "A book, a knife, an apple, a small green banana, a glass and a ceramic mug are on the part of the table away from the camera.", "type": "scene", "weight": 2 }, { "content": "In the background is a light-coloured floor.", "type": "scene", "weight": 1 }, { "content": "To the right of the table is a couch with green and grey cushions.", "type": "attribute", "weight": 2 }, { "content": "A man in dark brown trousers sits with his legs crossed on the couch next to the table.", "type": "attribute", "weight": 3 } ] }, { "event": "The right hand of the person sitting on the couch enters the frame, holding a yellow bottle. The right hand turns the yellow bottle to show both the front and back of the bottle to the camera. The right hand then leaves from the right side of the frame holding the yellow bottle.", "visual_elements": [ { "content": "The right hand of the person sitting on the couch enters the frame.", "type": "action", "weight": 3 }, { "content": "The hand holds a yellow bottle.", "type": "attribute", "weight": 3 }, { "content": "The right hand turns the yellow bottle to show both the front and back of the bottle to the camera.", "type": "action", "weight": 3 }, { "content": "The right hand leaves from the right side of the frame holding the yellow bottle.", "type": "action", "weight": 2 } ] }, { "event": "The right hand enters from the right side of the frame holding another transparent bottle with a brown cap. The person turns the bottle back and forth with his right hand to show different parts of the bottle to the camera. The person leaves from the right side of the frame with the transparent bottle.", "visual_elements": [ { "content": "The right hand enters from the right side of the frame holding another transparent bottle with a brown cap.", "type": "action", "weight": 3 }, { "content": "The person turns the bottle back and forth with his right hand to show different parts of the bottle to the camera.", "type": "action", "weight": 3 }, { "content": "The person leaves from the right side of the frame with the transparent bottle.", "type": "action", "weight": 2 } ] }, { "event": "Next, the right hand enters from the right side of the frame holding a coloured pen. 
The right hand turns the pen in front of the camera and then leaves from the right side of the frame holding the pen.", "visual_elements": [ { "content": "The right hand enters from the right side of the frame holding a coloured pen.", "type": "action", "weight": 3 }, { "content": "The right hand turns the pen in front of the camera.", "type": "action", "weight": 3 }, { "content": "The right hand leaves from the right side of the frame holding the pen.", "type": "action", "weight": 2 } ] }, { "event": "Afterwards, the right hand enters from the right side of the screen, holding a white radish with green leaves. The right hand turns the white radish in front of the camera and then leaves from the right side of the frame with the radish.", "visual_elements": [ { "content": "The right hand enters from the right side of the screen, holding a white radish with green leaves.", "type": "action", "weight": 3 }, { "content": "The right hand turns the white radish in front of the camera.", "type": "action", "weight": 3 }, { "content": "The right hand leaves from the right side of the frame with the radish.", "type": "action", "weight": 2 } ] } ], "n_events": 5, "n_elements": 18 }, { "index": "TUNA_0516", "video_path": "PerceptionTest/video_3211.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 14.63, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a textured wooden table. A box of \"blend-a-med\" toothpaste is on the table and the toothpaste box is rectangular in shape with white and red as the main colours.\nA hand then enters from the top side of the screen holding a black remote control. The hand presses down with the remote control, and then the hand leaves from the top side of the screen with the remote control. The hand then enters from the top of the screen holding a black smartphone. A few seconds later, the hand holding the mobile phone leaves from the top of the screen. Next, the hand enters from the top of the screen holding a pinkish-purple stuffed rabbit. After a few seconds, the hand leaves from the top of the screen holding the stuffed animal.", "events": [ { "event": "The video begins with the camera focusing on a textured wooden table. A box of \"blend-a-med\" toothpaste is on the table and the toothpaste box is rectangular in shape with white and red as the main colours.", "visual_elements": [ { "content": "The video begins with the camera focusing on a textured wooden table.", "type": "camera", "weight": 3 }, { "content": "A box of \"blend-a-med\" toothpaste is on the table.", "type": "attribute", "weight": 3 }, { "content": "The toothpaste box is rectangular in shape with white and red as the main colours.", "type": "attribute", "weight": 2 } ] }, { "event": "A hand then enters from the top side of the screen holding a black remote control. The hand presses down with the remote control, and then the hand leaves from the top side of the screen with the remote control. The hand then enters from the top of the screen holding a black smartphone. A few seconds later, the hand holding the mobile phone leaves from the top of the screen. Next, the hand enters from the top of the screen holding a pinkish-purple stuffed rabbit. 
After a few seconds, the hand leaves from the top of the screen holding the stuffed animal.", "visual_elements": [ { "content": "A hand then enters from the top side of the screen holding a black remote control.", "type": "action", "weight": 3 }, { "content": "The hand presses down with the remote control.", "type": "action", "weight": 3 }, { "content": "The hand leaves from the top side of the screen with the remote control.", "type": "action", "weight": 2 }, { "content": "The hand then enters from the top of the screen holding a black smartphone.", "type": "action", "weight": 3 }, { "content": "A few seconds later, the hand holding the mobile phone leaves from the top of the screen.", "type": "action", "weight": 2 }, { "content": "The hand enters from the top of the screen holding a pinkish-purple stuffed rabbit.", "type": "action", "weight": 3 }, { "content": "After a few seconds, the hand leaves from the top of the screen holding the stuffed animal.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0517", "video_path": "PerceptionTest/video_3243.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 34.88, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a dining table with some ingredients and tablewares on it. Behind the table stands a man in a blue jacket. On the table, near the camera's side, an orange plate is on the far left. Some cut carrots and tomatoes and a fork are in the plate. On the right side of the plate are three pink paper-cutting letters arranged in a single line. The letters read \"D\", \"O\", and \"G\" from left to right. To the right of the paper cutting a gold aluminium lid. On the side of the table away from the camera, a silver teapot with two slices of bread and an avocado on it is on the far left. To the right of the teapot is a plate of lettuce leaves. On the plate is a knife with a black handle. To the right of the lettuce leaves is a bottle of olive oil and a jar of salt.\nThe person at the back of the table disrupts the paper-cutting letters on the table and then rearranges them into a pattern of \"D\", \"O\" and \"G\" from left to right. The person then takes the knife from the plate of lettuce leaves with his right hand and holds the leaves on the plate with his left hand. The person takes the knife and slowly tears two leaves on the plate into pieces. Next, this person picks up the plate with the carrots and tomatoes and uses a fork to push some of the vegetables onto the plate with lettuce. The person then puts the plate back in place. With his right hand, this person pinches some grains of salt from the salt jar and sprinkles the salt onto the plate in front of him. The person then picks up the olive oil bottle with his left hand and pours some olive oil onto the plate in front of him. At the end of the video, the person again reaches out his right hand to take the fork on the plate near the camera.", "events": [ { "event": "The video begins with the camera focusing on a dining table with some ingredients and tablewares on it. Behind the table stands a man in a blue jacket. On the table, near the camera's side, an orange plate is on the far left. Some cut carrots and tomatoes and a fork are in the plate. On the right side of the plate are three pink paper-cutting letters arranged in a single line. The letters read \"D\", \"O\", and \"G\" from left to right. 
To the right of the paper cutting is a gold aluminium lid. On the side of the table away from the camera, a silver teapot with two slices of bread and an avocado on it is on the far left. To the right of the teapot is a plate of lettuce leaves. On the plate is a knife with a black handle. To the right of the lettuce leaves is a bottle of olive oil and a jar of salt.", "visual_elements": [ { "content": "The camera focuses on a dining table.", "type": "camera", "weight": 3 }, { "content": "Some ingredients and tableware are on it.", "type": "attribute", "weight": 1 }, { "content": "Behind the table stands a man in a blue jacket.", "type": "attribute", "weight": 2 }, { "content": "On the table, near the camera's side, an orange plate is on the far left.", "type": "attribute", "weight": 2 }, { "content": "Some cut carrots and tomatoes and a fork are in the plate.", "type": "attribute", "weight": 3 }, { "content": "On the right side of the plate are three pink paper-cutting letters arranged in a single line.", "type": "attribute", "weight": 3 }, { "content": "The letters read \"D\", \"O\", and \"G\" from left to right.", "type": "attribute", "weight": 3 }, { "content": "To the right of the paper cutting is a gold aluminium lid.", "type": "attribute", "weight": 1 }, { "content": "On the side of the table away from the camera, a silver teapot is on the far left.", "type": "attribute", "weight": 3 }, { "content": "Two slices of bread and an avocado are on the teapot.", "type": "attribute", "weight": 3 }, { "content": "To the right of the teapot is a plate of lettuce leaves.", "type": "attribute", "weight": 3 }, { "content": "On the plate is a knife with a black handle.", "type": "attribute", "weight": 1 }, { "content": "To the right of the lettuce leaves is a bottle of olive oil and a jar of salt.", "type": "attribute", "weight": 3 } ] }, { "event": "The person at the back of the table disrupts the paper-cutting letters on the table and then rearranges them into a pattern of \"D\", \"O\" and \"G\" from left to right. The person then takes the knife from the plate of lettuce leaves with his right hand and holds the leaves on the plate with his left hand. The person takes the knife and slowly tears two leaves on the plate into pieces. Next, this person picks up the plate with the carrots and tomatoes and uses a fork to push some of the vegetables onto the plate with lettuce. The person then puts the plate back in place. With his right hand, this person pinches some grains of salt from the salt jar and sprinkles the salt onto the plate in front of him. The person then picks up the olive oil bottle with his left hand and pours some olive oil onto the plate in front of him. 
At the end of the video, the person again reaches out his right hand to take the fork on the plate near the camera.", "visual_elements": [ { "content": "The person at the back of the table disrupts the paper-cutting letters on the table.", "type": "action", "weight": 3 }, { "content": "The person then rearranges them into a pattern of \"D\", \"O\" and \"G\" from left to right.", "type": "action", "weight": 3 }, { "content": "The person takes the knife from the plate of lettuce leaves with his right hand and holds the leaves on the plate with his left hand.", "type": "action", "weight": 3 }, { "content": "The person takes the knife and slowly tears two leaves on the plate into pieces.", "type": "action", "weight": 3 }, { "content": "Next, this person picks up the plate with the carrots and tomatoes and uses a fork to push some of the vegetables onto the plate with lettuce.", "type": "action", "weight": 3 }, { "content": "The person then puts the plate back in place.", "type": "action", "weight": 2 }, { "content": "With his right hand, this person pinches some grains of salt from the salt jar and sprinkles the salt onto the plate in front of him.", "type": "action", "weight": 3 }, { "content": "The person then picks up the olive oil bottle with his left hand and pours some olive oil onto the plate in front of him.", "type": "action", "weight": 3 }, { "content": "The person again reaches out his right hand to take the fork on the plate near the camera.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 22 }, { "index": "TUNA_0518", "video_path": "PerceptionTest/video_3246.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 35.05, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a pair of black shoes with an orange tomato placed on them. The shoes are placed on a platform covered with a green checkered towel that has some cartoon figures on it. There are four buttons on the platform to the right of the shoes, and the buttons are white, black, silver and transparent in colour.\nThe camera moves to the left and the shoes and buttons leave the frame. In the centre of the frame is a white tiled floor. Part of the green checkered towel with cartoon figures can be seen on the right side of the frame.\nThe camera moves to the right and the shoes comes back into the frame. A hand reaches out from the top of the frame. The hand picks up the tomato to remove it from the shoes. The hand holds the tomato for a few seconds on the platform which is on the right of the shoes before it puts the tomato back on the shoes. The hand leaves from the top of the frame.", "events": [ { "event": "The video begins with the camera focusing on a pair of black shoes with an orange tomato placed on them. The shoes are placed on a platform covered with a green checkered towel that has some cartoon figures on it. 
There are four buttons on the platform to the right of the shoes, and the buttons are white, black, silver and transparent in colour.", "visual_elements": [ { "content": "The camera focuses on a pair of black shoes.", "type": "camera", "weight": 3 }, { "content": "An orange tomato is placed on the shoes.", "type": "attribute", "weight": 3 }, { "content": "The shoes are placed on a platform covered with a green checkered towel.", "type": "attribute", "weight": 2 }, { "content": "Some cartoon figures are on the green checkered towel.", "type": "attribute", "weight": 1 }, { "content": "There are four buttons on the platform to the right of the shoes.", "type": "attribute", "weight": 3 }, { "content": "The buttons are white, black, silver and transparent in colour.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera moves to the left and the shoes and buttons leave the frame. In the centre of the frame is a white tiled floor. Part of the green checkered towel with cartoon figures can be seen on the right side of the frame.", "visual_elements": [ { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The shoes and buttons leave the frame.", "type": "attribute", "weight": 2 }, { "content": "In the centre of the frame is a white tiled floor.", "type": "attribute", "weight": 3 }, { "content": "Part of the green checkered towel with cartoon figures can be seen on the right side of the frame.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves to the right and the shoes come back into the frame. A hand reaches out from the top of the frame. The hand picks up the tomato to remove it from the shoes. The hand holds the tomato for a few seconds on the platform which is on the right of the shoes before it puts the tomato back on the shoes. The hand leaves from the top of the frame.", "visual_elements": [ { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The shoes come back into the frame.", "type": "attribute", "weight": 2 }, { "content": "A hand reaches out from the top of the frame.", "type": "action", "weight": 3 }, { "content": "The hand picks up the tomato to remove it from the shoes.", "type": "action", "weight": 3 }, { "content": "The hand holds the tomato for a few seconds on the platform which is on the right of the shoes before it puts the tomato back on the shoes.", "type": "action", "weight": 3 }, { "content": "The hand leaves from the top of the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0519", "video_path": "PerceptionTest/video_3250.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 20.83, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a brown tiled floor with intricate patterns. On the right side of the frame, a brown wooden door folds inwards. Outside the door are orange walls. A woman in black shorts stands barefoot on the floor. Only the part of the woman from the waist down is in the frame. There is a white appliance behind the woman's right side.\nThe woman takes a step forward while bending down to put down a white sheet of paper with green paint on the ground. The woman then takes two steps back and bends down to put down an orange balloon with white patterns. The balloon rolls gently on the ground. The woman then pulls out a dark brown wooden chair from behind the door. 
The woman places the chair diagonally by the door and then closes the brown wooden door. The balloon rolls under the chair. The paper on the floor, the balloon and the chair are squeezed by the door to move outwards and approach the camera. The woman then stops pushing the door and opens it. The woman walks around the chair to the front of the camera.", "events": [ { "event": "The video begins with the camera focusing on a brown tiled floor with intricate patterns. On the right side of the frame, a brown wooden door folds inwards. Outside the door are orange walls. A woman in black shorts stands barefoot on the floor. Only the part of the woman from the waist down is in the frame. There is a white appliance behind the woman's right side.", "visual_elements": [ { "content": "The camera focuses on a brown tiled floor with intricate patterns.", "type": "camera", "weight": 3 }, { "content": "On the right side of the frame, a brown wooden door folds inwards.", "type": "attribute", "weight": 3 }, { "content": "Outside the door are orange walls.", "type": "attribute", "weight": 2 }, { "content": "A woman in black shorts stands barefoot on the floor.", "type": "attribute", "weight": 3 }, { "content": "Only the part of the woman from the waist down is in the frame.", "type": "attribute", "weight": 2 }, { "content": "There is a white appliance behind the woman's right side.", "type": "scene", "weight": 1 } ] }, { "event": "The woman takes a step forward while bending down to put down a white sheet of paper with green paint on the ground. The woman then takes two steps back and bends down to put down an orange balloon with white patterns. The balloon rolls gently on the ground. The woman then pulls out a dark brown wooden chair from behind the door. The woman places the chair diagonally by the door and then closes the brown wooden door. The balloon rolls under the chair. The paper on the floor, the balloon and the chair are squeezed by the door to move outwards and approach the camera. The woman then stops pushing the door and opens it. The woman walks around the chair to the front of the camera.", "visual_elements": [ { "content": "The woman takes a step forward while bending down to put down a white sheet of paper with green paint on the ground.", "type": "action", "weight": 3 }, { "content": "The woman takes two steps back and bends down to put down an orange balloon with white patterns.", "type": "action", "weight": 3 }, { "content": "The balloon rolls gently on the ground.", "type": "action", "weight": 2 }, { "content": "The woman then pulls out a dark brown wooden chair from behind the door.", "type": "action", "weight": 3 }, { "content": "The woman places the chair diagonally by the door.", "type": "action", "weight": 3 }, { "content": "The woman then closes the brown wooden door.", "type": "action", "weight": 3 }, { "content": "The balloon rolls under the chair.", "type": "action", "weight": 2 }, { "content": "The paper on the floor, the balloon and the chair are squeezed by the door to move outwards and approach the camera.", "type": "action", "weight": 3 }, { "content": "The woman then stops pushing the door and opens it. 
The woman walks around the chair to the front of the camera.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0520", "video_path": "PerceptionTest/video_3261.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 19.75, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a table with a colourful checkered tablecloth. On the table is a green cup with a white spoon in it. To the right of the cup is a glass bowl filled with beige squares. A slice of bread is placed on the glass bowl. To the left of the cup is a yellow cardboard box. There is also a fork, an upside-down glass bowl and a white kettle on the table in the background.\nTwo hands reach out from the top of the frame. A black watch is on the left wrist. The two hands reach for the yellow cardboard box on the table. The hands open the box and take out a tea bag from it. The left hand leaves from the right side of the frame and the right hand places the tea bag into the green cup. The right hand then picks up the white electric kettle and pours water into the cup. The right hand returns the kettle in the place.\nThe right hand reaches again for the green cup and the left hand reenters the frame to pick up the tab left outside of the tea bag that is steeping in the cup. The right hand picks up the spoon in the cup and begins to stir the tea. After a few stirs, the right hand puts the spoon back into the cup and the left hand releases the tab. The left hand then takes away the bread slice on the glass bowl and the right hand takes one of the squares from the bowl and places it in the cup. Next, the right hand takes the spoon again in the cup to stir the tea while the left hand picks up the tea bag tab outside the cup. Finally, the hands stop moving and leave from the right side of the frame.", "events": [ { "event": "The video begins with the camera focusing on a table with a colourful checkered tablecloth. On the table is a green cup with a white spoon in it. To the right of the cup is a glass bowl filled with beige squares. A slice of bread is placed on the glass bowl. To the left of the cup is a yellow cardboard box. There is also a fork, an upside-down glass bowl and a white kettle on the table in the background.", "visual_elements": [ { "content": "The camera focuses on a table.", "type": "camera", "weight": 3 }, { "content": "The table is covered with a colourful checkered tablecloth.", "type": "attribute", "weight": 2 }, { "content": "On the table is a green cup.", "type": "attribute", "weight": 3 }, { "content": "A white spoon is in the cup.", "type": "attribute", "weight": 2 }, { "content": "To the right of the cup is a glass bowl filled with beige squares.", "type": "attribute", "weight": 3 }, { "content": "A slice of bread is placed on the glass bowl.", "type": "attribute", "weight": 2 }, { "content": "To the left of the cup is a yellow cardboard box.", "type": "attribute", "weight": 3 }, { "content": "There is also a fork, an upside-down glass bowl and a white kettle on the table in the background.", "type": "scene", "weight": 2 } ] }, { "event": "Two hands reach out from the top of the frame. A black watch is on the left wrist. The two hands reach for the yellow cardboard box on the table. The hands open the box and take out a tea bag from it. The left hand leaves from the right side of the frame and the right hand places the tea bag into the green cup. 
The right hand then picks up the white electric kettle and pours water into the cup. The right hand returns the kettle in the place.", "visual_elements": [ { "content": "Two hands reach out from the top of the frame.", "type": "action", "weight": 3 }, { "content": "A black watch is on the left wrist.", "type": "attribute", "weight": 2 }, { "content": "The two hands reach for the yellow cardboard box on the table.", "type": "action", "weight": 2 }, { "content": "The hands open the box and take out a tea bag from it.", "type": "action", "weight": 3 }, { "content": "The left hand leaves from the right side of the frame and the right hand places the tea bag into the green cup.", "type": "action", "weight": 3 }, { "content": "The right hand then picks up the white electric kettle and pours water into the cup.", "type": "action", "weight": 3 }, { "content": "The right hand returns the kettle in the place.", "type": "action", "weight": 2 } ] }, { "event": "The right hand reaches again for the green cup and the left hand reenters the frame to pick up the tab left outside of the tea bag that is steeping in the cup. The right hand picks up the spoon in the cup and begins to stir the tea. After a few stirs, the right hand puts the spoon back into the cup and the left hand releases the tab. The left hand then takes away the bread slice on the glass bowl and the right hand takes one of the squares from the bowl and places it in the cup. Next, the right hand takes the spoon again in the cup to stir the tea while the left hand picks up the tea bag tab outside the cup. Finally, the hands stop moving and leave from the right side of the frame.", "visual_elements": [ { "content": "The right hand reaches again for the green cup and the left hand reenters the frame to pick up the tab left outside of the tea bag that is steeping in the cup.", "type": "action", "weight": 3 }, { "content": "The right hand picks up the spoon in the cup and begins to stir the tea.", "type": "action", "weight": 3 }, { "content": "After a few stirs, the right hand puts the spoon back into the cup and the left hand releases the tab.", "type": "action", "weight": 3 }, { "content": "The left hand takes away the bread slice on the glass bowl and the right hand takes one of the squares from the bowl and places it in the cup.", "type": "action", "weight": 3 }, { "content": "The right hand takes the spoon again in the cup to stir the tea while the left hand picks up the tea bag tab outside the cup.", "type": "action", "weight": 3 }, { "content": "The hands stop moving and leave from the right side of the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0521", "video_path": "PerceptionTest/video_3262.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 19.2, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a wooden table with a white desk lamp on the bottom right corner. On the side of the table away from the camera, a black remote control is on the far left. To the right of the remote control are two pencils and a black ballpoint pen. On the far right is a ceramic mug containing several pens. A person wearing a blue sleeveless top sits behind the table with his hands on it. The chair the person is sitting in is a long wooden chair. Green checkered curtains and white curtains and white walls can be seen in the background. 
On the wall on the right side of the frame is white socket with a black plug already inserted into it.\nThe person then picks up the black ballpoint pen with his right hand and then places it in its place after tapping it a few times on the table. The person then reaches out with his left hand for the power plug of the desk lamp. The person then inserts the plug into the socket on the right side of the frame. Next, this person picks up the two pencils and the ballpoint pen on the table together and puts them into the ceramic cup. The person then presses the button on the base of the table lamp with his left hand. The lamp comes on and illuminates the table. The lamp is the only source of light left in the room. At the end of the video, the person puts his hands on the tabletop.", "events": [ { "event": "The video begins with the camera focusing on a wooden table with a white desk lamp on the bottom right corner. On the side of the table away from the camera, a black remote control is on the far left. To the right of the remote control are two pencils and a black ballpoint pen. On the far right is a ceramic mug containing several pens. A person wearing a blue sleeveless top sits behind the table with his hands on it. The chair the person is sitting in is a long wooden chair. Green checkered curtains and white curtains and white walls can be seen in the background. On the wall on the right side of the frame is white socket with a black plug already inserted into it.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "A white desk lamp is on the bottom right corner of the table.", "type": "attribute", "weight": 3 }, { "content": "On the side of the table away from the camera, a black remote control is on the far left.", "type": "attribute", "weight": 3 }, { "content": "To the right of the remote control are two pencils and a black ballpoint pen.", "type": "attribute", "weight": 3 }, { "content": "On the far right is a ceramic mug containing several pens.", "type": "attribute", "weight": 3 }, { "content": "A person wearing a blue sleeveless top sits behind the table.", "type": "attribute", "weight": 3 }, { "content": "The person puts his hands on the table.", "type": "attribute", "weight": 2 }, { "content": "The chair the person is sitting in is a long wooden chair.", "type": "scene", "weight": 1 }, { "content": "Green checkered curtains and white curtains and white walls can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "On the wall on the right side of the frame is white socket.", "type": "attribute", "weight": 2 }, { "content": "A black plug is already inserted into the socket.", "type": "attribute", "weight": 2 } ] }, { "event": "The person then picks up the black ballpoint pen with his right hand and then places it in its place after tapping it a few times on the table. The person then reaches out with his left hand for the power plug of the desk lamp. The person then inserts the plug into the socket on the right side of the frame. Next, this person picks up the two pencils and the ballpoint pen on the table together and puts them into the ceramic cup. The person then presses the button on the base of the table lamp with his left hand. The lamp comes on and illuminates the table. The lamp is the only source of light left in the room. 
At the end of the video, the person puts his hands on the tabletop.", "visual_elements": [ { "content": "The person then picks up the black ballpoint pen with his right hand and then puts it back in its place after tapping it a few times on the table.", "type": "action", "weight": 3 }, { "content": "The person reaches out with his left hand for the power plug of the desk lamp.", "type": "action", "weight": 3 }, { "content": "The person inserts the plug into the socket on the right side of the frame.", "type": "action", "weight": 3 }, { "content": "This person picks up the two pencils and the ballpoint pen on the table together and puts them into the ceramic cup.", "type": "action", "weight": 3 }, { "content": "The person presses the button on the base of the table lamp with his left hand.", "type": "action", "weight": 3 }, { "content": "The lamp comes on and illuminates the table.", "type": "attribute", "weight": 3 }, { "content": "The lamp is the only source of light left in the room.", "type": "attribute", "weight": 2 }, { "content": "The person puts his hands on the tabletop.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 19 }, { "index": "TUNA_0522", "video_path": "PerceptionTest/video_3293.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 24.23, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a wooden door. A black clothesline hangs across the back of the door. Hanging from the line is a black coat hanger. Hanging from the coat hanger is a black backpack with a book on it. On the cover of the book is an anime character with bat wings. A beige crossbody bag with \"BAIFLOW\" printed on it hangs on the wall on the right of the frame.\nA hand enters the frame from the right holding a black and silver coloured pen. The hand turns the pen slightly to show its appearance, and then the hand and pen leave the frame together. The hand then enters the frame again from the right, holding a wooden pencil this time. The hand turns the pencil slightly to show its appearance, and then the hand and the pencil leave the frame together. The hand then enters the frame a third time from the right, this time holding a metal fork. The hand remains in the frame for a moment to demonstrate the appearance of the fork, and then the hand and fork leave the frame together.", "events": [ { "event": "The video begins with the camera focusing on a wooden door. A black clothesline hangs across the back of the door. Hanging from the line is a black coat hanger. Hanging from the coat hanger is a black backpack with a book on it. On the cover of the book is an anime character with bat wings. 
A beige crossbody bag with \"BAIFLOW\" printed on it hangs on the wall on the right of the frame.", "visual_elements": [ { "content": "The video begins with the camera focusing on a wooden door.", "type": "camera", "weight": 3 }, { "content": "A black clothesline hangs across the back of the door.", "type": "attribute", "weight": 2 }, { "content": "Hanging from the line is a black coat hanger.", "type": "attribute", "weight": 3 }, { "content": "Hanging from the coat hanger is a black backpack.", "type": "attribute", "weight": 3 }, { "content": "A book is placed on the backpack.", "type": "attribute", "weight": 3 }, { "content": "On the cover of the book is an anime character with bat wings.", "type": "attribute", "weight": 2 }, { "content": "A beige crossbody bag hangs on the wall on the right of the frame.", "type": "attribute", "weight": 3 }, { "content": "The word \"BAIFLOW\" is printed on the crossbody bag.", "type": "attribute", "weight": 2 } ] }, { "event": "A hand enters the frame from the right holding a black and silver coloured pen. The hand turns the pen slightly to show its appearance, and then the hand and pen leave the frame together. The hand then enters the frame again from the right, holding a wooden pencil this time. The hand turns the pencil slightly to show its appearance, and then the hand and the pencil leave the frame together. The hand then enters the frame a third time from the right, this time holding a metal fork. The hand remains in the frame for a moment to demonstrate the appearance of the fork, and then the hand and fork leave the frame together.", "visual_elements": [ { "content": "A hand enters the frame from the right holding a black and silver coloured pen.", "type": "action", "weight": 3 }, { "content": "The hand turns the pen slightly to show its appearance.", "type": "action", "weight": 3 }, { "content": "Then the hand and pen leave the frame together.", "type": "action", "weight": 2 }, { "content": "The hand then enters the frame again from the right, holding a wooden pencil this time.", "type": "action", "weight": 3 }, { "content": "The hand turns the pencil slightly to show its appearance.", "type": "action", "weight": 3 }, { "content": "Then the hand and the pencil leave the frame together.", "type": "action", "weight": 2 }, { "content": "The hand then enters the frame a third time from the right, this time holding a metal fork.", "type": "action", "weight": 3 }, { "content": "The hand remains in the frame for a moment to demonstrate the appearance of the fork.", "type": "action", "weight": 3 }, { "content": "Then the hand and fork leave the frame together.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0523", "video_path": "PerceptionTest/video_3302.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 25.45, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a wooden table with various ingredients and tablewares on it. Near the camera, a plate of sliced white bread is on the far left of the table. To the right of the bread slices is a bowl of lettuce leaves. To the upper right of the lettuce leaves is a plate of brown square slices of meat. To the lower right of the lettuce leaves is a small dish of sliced tomatoes. On the far right of the table is a small bowl containing two raw eggs and a jar with a blue wrapper. In the upper left corner of the table is a green cup. 
To the right of the cup is an empty plate with intricate patterns. On the tabletop, to the right of the plate are four blocks of different colours and shapes. A person in a blue blouse sits behind the table. His hands are placed on the ends of the table.\nFirst, the person swaps the positions of two pairs of the four blocks. Next, the person picks up a slice of white bread and places it on the right side of the empty plate. The person picks up another slice of bread and places it on the left side of the empty plate. The person then picks up a slice of brown meat and places it on the bread slice on the right side. Next, this person uses his right hand to pick up a few slices of tomato from the small dish and lays them flat on top of the meat slice. Then, with both hands, this person takes a few lettuce leaves from the bowl and places them on the tomato slices. Finally, this person takes the white bread on the left side of the plate and covers it on the lettuce leaves to make a sandwich. This person moves the completed sandwich to the centre of the plate. This person places their hands back on the sides of the table. Finally, this person's right hand leaves from the left side of the frame.", "events": [ { "event": "The video begins with the camera focusing on a wooden table with various ingredients and tablewares on it. Near the camera, a plate of sliced white bread is on the far left of the table. To the right of the bread slices is a bowl of lettuce leaves. To the upper right of the lettuce leaves is a plate of brown square slices of meat. To the lower right of the lettuce leaves is a small dish of sliced tomatoes. On the far right of the table is a small bowl containing two raw eggs and a jar with a blue wrapper. In the upper left corner of the table is a green cup. To the right of the cup is an empty plate with intricate patterns. On the tabletop, to the right of the plate are four blocks of different colours and shapes. A person in a blue blouse sits behind the table. 
His hands are placed on the ends of the table.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "Various ingredients and tablewares are put on the table.", "type": "attribute", "weight": 2 }, { "content": "Near the camera, a plate of sliced white bread is on the far left of the table.", "type": "attribute", "weight": 3 }, { "content": "To the right of the bread slices is a bowl of lettuce leaves.", "type": "attribute", "weight": 3 }, { "content": "To the upper right of the lettuce leaves is a plate of brown square slices of meat.", "type": "attribute", "weight": 3 }, { "content": "To the lower right of the lettuce leaves is a small dish of sliced tomatoes.", "type": "attribute", "weight": 3 }, { "content": "On the far right of the table is a small bowl containing two raw eggs and a jar with a blue wrapper.", "type": "attribute", "weight": 3 }, { "content": "In the upper left corner of the table is a green cup.", "type": "attribute", "weight": 3 }, { "content": "To the right of the cup is an empty plate with intricate patterns.", "type": "attribute", "weight": 3 }, { "content": "On the tabletop, to the right of the plate are four blocks of different colours and shapes.", "type": "attribute", "weight": 3 }, { "content": "A person in a blue blouse sits behind the table.", "type": "attribute", "weight": 3 }, { "content": "His hands are placed on the ends of the table.", "type": "attribute", "weight": 2 } ] }, { "event": "First, the person swaps the positions of two pairs of the four blocks. Next, the person picks up a slice of white bread and places it on the right side of the empty plate. The person picks up another slice of bread and places it on the left side of the empty plate. The person then picks up a slice of brown meat and places it on the bread slice on the right side. Next, this person uses his right hand to pick up a few slices of tomato from the small dish and lays them flat on top of the meat slice. Then, with both hands, this person takes a few lettuce leaves from the bowl and places them on the tomato slices. Finally, this person takes the white bread on the left side of the plate and covers it on the lettuce leaves to make a sandwich. This person moves the completed sandwich to the centre of the plate. This person places their hands back on the sides of the table. 
Finally, this person's right hand leaves from the left side of the frame.", "visual_elements": [ { "content": "The person swaps the positions of two pairs of the four blocks.", "type": "action", "weight": 3 }, { "content": "The person picks up a slice of white bread and places it on the right side of the empty plate.", "type": "action", "weight": 3 }, { "content": "The person picks up another slice of bread and places it on the left side of the empty plate.", "type": "action", "weight": 3 }, { "content": "The person then picks up a slice of brown meat and places it on the bread slice on the right side.", "type": "action", "weight": 3 }, { "content": "This person uses his right hand to pick up a few slices of tomato from the small dish and lays them flat on top of the meat slice.", "type": "action", "weight": 3 }, { "content": "With both hands, this person takes a few lettuce leaves from the bowl and places them on the tomato slices.", "type": "action", "weight": 3 }, { "content": "This person takes the white bread on the left side of the plate and covers it on the lettuce leaves to make a sandwich.", "type": "action", "weight": 3 }, { "content": "This person moves the completed sandwich to the centre of the plate.", "type": "action", "weight": 3 }, { "content": "This person places their hands back on the sides of the table.", "type": "action", "weight": 2 }, { "content": "This person's right hand leaves from the left side of the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 22 }, { "index": "TUNA_0524", "video_path": "PerceptionTest/video_3321.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 34.82, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a table covered with a white tablecloth with red patterns. In the centre of the table is a red NESCAFÉ mug. To the right of the mug is a glass bowl with white sugar cubes in it. To the left of the mug is a green ÇAYKUR tea box. A knife and fork sit on the tabletop in front of the mug. Behind the table, a person wearing a dark long-sleeved blouse rests his hands on the green tea box with his upper body visible in the frame.\nThe person's two hands open the lid of the green tea box and take out a wrapped tea bag from it. The person then opens the wrapping of the tea bag, takes out the tea bag and places the tea bag into the red NESCAFÉ mug. The person then picks up a transparent glass jug in front of him. The person pours the water from the jug into the mug. After pouring the water, the person puts down the glass jug and claps his hands together. His right hand brings in a metal spoon from the left side of the frame. The person begins to stir the tea in the mug with the spoon. When the stirring is complete, the person takes the spoon out and places it next to the knife and fork on the table. The person's right hand then leaves the left side of the frame.", "events": [ { "event": "The video begins with the camera focusing on a table covered with a white tablecloth with red patterns. In the centre of the table is a red NESCAFÉ mug. To the right of the mug is a glass bowl with white sugar cubes in it. To the left of the mug is a green ÇAYKUR tea box. A knife and fork sit on the tabletop in front of the mug. 
Behind the table, a person wearing a dark long-sleeved blouse rests his hands on the green tea box with his upper body visible in the frame.", "visual_elements": [ { "content": "The camera focuses on a table.", "type": "camera", "weight": 3 }, { "content": "The table is covered with a white tablecloth with red patterns.", "type": "scene", "weight": 2 }, { "content": "In the centre of the table is a red NESCAFÉ mug.", "type": "attribute", "weight": 3 }, { "content": "To the right of the mug is a glass bowl with white sugar cubes in it.", "type": "attribute", "weight": 3 }, { "content": "To the left of the mug is a green ÇAYKUR tea box.", "type": "attribute", "weight": 3 }, { "content": "A knife and fork sit on the tabletop in front of the mug.", "type": "attribute", "weight": 3 }, { "content": "Behind the table, a person wearing a dark long-sleeved blouse rests his hands on the green tea box with his upper body visible in the frame.", "type": "attribute", "weight": 3 } ] }, { "event": "The person's two hands open the lid of the green tea box and take out a wrapped tea bag from it. The person then opens the wrapping of the tea bag, takes out the tea bag and places the tea bag into the red NESCAFÉ mug. The person then picks up a transparent glass jug in front of him. The person pours the water from the jug into the mug. After pouring the water, the person puts down the glass jug and claps his hands together. His right hand brings in a metal spoon from the left side of the frame. The person begins to stir the tea in the mug with the spoon. When the stirring is complete, the person takes the spoon out and places it next to the knife and fork on the table. The person's right hand then leaves the left side of the frame.", "visual_elements": [ { "content": "The person's two hands open the lid of the green tea box and take out a wrapped tea bag from it.", "type": "action", "weight": 3 }, { "content": "The person then opens the wrapping of the tea bag, takes out the tea bag and places the tea bag into the red NESCAFÉ mug.", "type": "action", "weight": 3 }, { "content": "The person then picks up a transparent glass jug in front of him.", "type": "action", "weight": 3 }, { "content": "The person pours the water from the jug into the mug.", "type": "action", "weight": 3 }, { "content": "After pouring the water, the person puts down the glass jug and claps his hands together.", "type": "action", "weight": 3 }, { "content": "His right hand brings in a metal spoon from the left side of the frame.", "type": "action", "weight": 2 }, { "content": "The person begins to stir the tea in the mug with the spoon.", "type": "action", "weight": 3 }, { "content": "When the stirring is complete, the person takes the spoon out and places it next to the knife and fork on the table.", "type": "action", "weight": 3 }, { "content": "The person's right hand then leaves the left side of the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0525", "video_path": "PerceptionTest/video_3341.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 12.7, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a wooden table with a round ornament on it. A person sits behind the table with his hands in fists, positioned above the table. 
The person wears dark clothes and has blue nail polish on his hands.\nThe camera then moves upwards slightly to show the person sitting on a beige sofa. The camera zooms in. The person slowly opens his hands. The person has a coin in the palm of his left hand. The camera then zooms out. The person clenches both fists again and the coin is encased in his hand. The camera turns to the right. The person folds his hands and shakes them from side to side. The coin repeatedly changes position between the left and right hands. The camera turns to the left and zooms in. The person stops moving and clenches his fists again. The person then opens his hands again, with the coin still in his left hand.", "events": [ { "event": "The video begins with the camera focusing on a wooden table with a round ornament on it. A person sits behind the table with his hands in fists, positioned above the table. The person wears dark clothes and has blue nail polish on his hands.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "A round ornament is on the table.", "type": "attribute", "weight": 2 }, { "content": "A person sits behind the table with his hands in fists, positioned above the table.", "type": "attribute", "weight": 3 }, { "content": "The person wears dark clothes and has blue nail polish on his hands.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera then moves upwards slightly to show the person sitting on a beige sofa. The camera zooms in. The person slowly opens his hands. The person has a coin in the palm of his left hand. The camera then zooms out. The person clenches both fists again and the coin is encased in his hand. The camera turns to the right. The person folds his hands and shakes them from side to side. The coin repeatedly changes position between the left and right hands. The camera turns to the left and zooms in. The person stops moving and clenches his fists again. 
The person then opens his hands again, with the coin still in his left hand.", "visual_elements": [ { "content": "The camera moves upwards slightly.", "type": "camera", "weight": 3 }, { "content": "The person sits on a beige sofa.", "type": "scene", "weight": 1 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The person slowly opens his hands.", "type": "action", "weight": 3 }, { "content": "The person has a coin in the palm of his left hand.", "type": "attribute", "weight": 3 }, { "content": "The camera zooms out.", "type": "camera", "weight": 3 }, { "content": "The person clenches both fists again and the coin is encased in his hand.", "type": "action", "weight": 3 }, { "content": "The camera turns to the right.", "type": "camera", "weight": 3 }, { "content": "The person folds his hands and shakes them from side to side.", "type": "action", "weight": 3 }, { "content": "The coin repeatedly changes position between the left and right hands.", "type": "action", "weight": 3 }, { "content": "The camera turns to the left and zooms in.", "type": "camera", "weight": 3 }, { "content": "The person stops moving and clenches his fists again.", "type": "action", "weight": 3 }, { "content": "The person then opens his hands again, with the coin still in his left hand.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0526", "video_path": "PerceptionTest/video_3364.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 24.07, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a wooden table with a blank piece of paper placed in the centre of it. At the upper left of the blank paper is a white cup with patterns. At the bottom left of the blank paper is a fork. On the right side of the blank paper is a pen with a blue cap. A person in khaki shorts stands behind the table. In the background is a white tiled floor with blue square patterns. The camera is steady and the view looks down on the table from above.\nThe two hands of the person standing behind the table enter the frame from the top of the screen. The person picks up the pen on the right side of the table and removes the cap. The person then holds the paper with the left hand and starts writing with the pen on the blank paper with the right hand. The person writes the letters \"HEN\" from left to right on the blank paper. After writing, the person places the pen back on the right side of the table and then holds up the paper with the letters on it to show in front of the camera. The person then places the paper back on the table.\nThe person's right hand brings in a green \"H\" letter toy from the left side of the frame and places it under the letter \"H\" written on the paper. Then, the right hand brings in a pink \"E\" toy from the left side of the frame and places it to the right of the green \"H\" letter. Finally, the right hand brings in a blue \"N\" toy from the left side of the frame and places it to the right of the pink \"E\". The right hand leaves the frame.", "events": [ { "event": "The video begins with the camera focusing on a wooden table with a blank piece of paper placed in the centre of it. At the upper left of the blank paper is a white cup with patterns. At the bottom left of the blank paper is a fork. On the right side of the blank paper is a pen with a blue cap. A person in khaki shorts stands behind the table. 
In the background is a white tiled floor with blue square patterns. The camera is steady and the view looks down on the table from above.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "A blank piece of paper is placed in the centre of the table.", "type": "attribute", "weight": 3 }, { "content": "At the upper left of the blank paper is a white cup with patterns.", "type": "attribute", "weight": 3 }, { "content": "At the bottom left of the blank paper is a fork.", "type": "attribute", "weight": 3 }, { "content": "On the right side of the blank paper is a pen with a blue cap.", "type": "attribute", "weight": 3 }, { "content": "A person in khaki shorts stands behind the table.", "type": "attribute", "weight": 3 }, { "content": "In the background is a white tiled floor with blue square patterns.", "type": "scene", "weight": 2 }, { "content": "The camera is steady and the view looks down on the table from above.", "type": "camera", "weight": 3 } ] }, { "event": "The two hands of the person standing behind the table enter the frame from the top of the screen. The person picks up the pen on the right side of the table and removes the cap. The person then holds the paper with the left hand and starts writing with the pen on the blank paper with the right hand. The person writes the letters \"HEN\" from left to right on the blank paper. After writing, the person places the pen back on the right side of the table and then holds up the paper with the letters on it to show in front of the camera. The person then places the paper back on the table.", "visual_elements": [ { "content": "The two hands of the person standing behind the table enter the frame from the top of the screen.", "type": "action", "weight": 3 }, { "content": "The person picks up the pen on the right side of the table and removes the cap.", "type": "action", "weight": 3 }, { "content": "The person holds the paper with the left hand and starts writing with the pen on the blank paper with the right hand.", "type": "action", "weight": 3 }, { "content": "The person writes the letters \"HEN\" from left to right on the blank paper.", "type": "action", "weight": 3 }, { "content": "After writing, the person places the pen back on the right side of the table.", "type": "action", "weight": 2 }, { "content": "The person holds up the paper with the letters on it to show in front of the camera.", "type": "action", "weight": 3 }, { "content": "The person places the paper back on the table.", "type": "action", "weight": 2 } ] }, { "event": "The person's right hand brings in a green \"H\" letter toy from the left side of the frame and places it under the letter \"H\" written on the paper. Then, the right hand brings in a pink \"E\" toy from the left side of the frame and places it to the right of the green \"H\" letter. Finally, the right hand brings in a blue \"N\" toy from the left side of the frame and places it to the right of the pink \"E\". 
The right hand leaves the frame.", "visual_elements": [ { "content": "The person's right hand brings in a green \"H\" letter toy from the left side of the frame and places it under the letter \"H\" written on the paper.", "type": "action", "weight": 3 }, { "content": "The right hand brings in a pink \"E\" toy from the left side of the frame and places it to the right of the green \"H\" letter.", "type": "action", "weight": 3 }, { "content": "Finally, the right hand brings in a blue \"N\" toy from the left side of the frame and places it to the right of the pink \"E\".", "type": "action", "weight": 3 }, { "content": "The right hand leaves the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0527", "video_path": "PerceptionTest/video_3388.mp4", "video_source": "PerceptionTest", "visual_characteristic": "High-Dynamic", "domain": "Daily Life", "duration": 20.75, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a person wearing a short floral patterned top and yellow shorts. In the background is a room. A bed with a geometrically patterned sheet is in the room. The top half of the walls are green and the bottom half of the walls have marbling. The door is a dark wooden door. The person stands behind a glass table with a rosy red pen and a white piece of paper on it.\nThe person picks up the pink pen with the right hand. As the left hand presses on the paper, the right hand begins to write on the white paper. The tip of the pen does not touch the paper while writing. After writing, the young man puts down the pen and picks up the blank paper to show it to the camera. There are two round holes on the left side of the paper. The person then places the paper back on the table and takes out two coloured letter cards from his trouser pocket with his right hand. The person then places the two cards on the table with a blue letter \"F\" on the left and a yellow letter \"T\" on the right. Finally, the person reaches the right hand to the camera and moves from right to left in front of the camera.", "events": [ { "event": "The video begins with the camera focusing on a person wearing a short floral patterned top and yellow shorts. In the background is a room. A bed with a geometrically patterned sheet is in the room. The top half of the walls are green and the bottom half of the walls have marbling. The door is a dark wooden door. The person stands behind a glass table with a rosy red pen and a white piece of paper on it.", "visual_elements": [ { "content": "The camera focuses on a person wearing a short floral patterned top and yellow shorts.", "type": "camera", "weight": 3 }, { "content": "In the background is a room.", "type": "scene", "weight": 2 }, { "content": "A bed with a geometrically patterned sheet is in the room.", "type": "scene", "weight": 2 }, { "content": "The top half of the walls are green and the bottom half of the walls have marbling.", "type": "scene", "weight": 2 }, { "content": "The door is a dark wooden door.", "type": "scene", "weight": 1 }, { "content": "The person stands behind a glass table with a rosy red pen and a white piece of paper on it.", "type": "attribute", "weight": 3 } ] }, { "event": "The person picks up the pink pen with the right hand. As the left hand presses on the paper, the right hand begins to write on the white paper. The tip of the pen does not touch the paper while writing. 
After writing, the young man puts down the pen and picks up the blank paper to show it to the camera. There are two round holes on the left side of the paper. The person then places the paper back on the table and takes out two coloured letter cards from his trouser pocket with his right hand. The person then places the two cards on the table with a blue letter \"F\" on the left and a yellow letter \"T\" on the right. Finally, the person reaches the right hand to the camera and moves from right to left in front of the camera.", "visual_elements": [ { "content": "The person picks up the pink pen with the right hand.", "type": "action", "weight": 3 }, { "content": "As the left hand presses on the paper, the right hand begins to write on the white paper.", "type": "action", "weight": 3 }, { "content": "The tip of the pen does not touch the paper while writing.", "type": "attribute", "weight": 2 }, { "content": "After writing, the young man puts down the pen and picks up the blank paper to show it to the camera.", "type": "action", "weight": 3 }, { "content": "There are two round holes on the left side of the paper.", "type": "attribute", "weight": 3 }, { "content": "The person places the paper back on the table.", "type": "action", "weight": 3 }, { "content": "The person takes out two coloured letter cards from his trouser pocket with his right hand.", "type": "action", "weight": 3 }, { "content": "The person places the two cards on the table.", "type": "action", "weight": 3 }, { "content": "A blue letter \"F\" is on the left, while a yellow letter \"T\" is on the right.", "type": "attribute", "weight": 3 }, { "content": "Finally, the person reaches the right hand to the camera and moves from right to left in front of the camera.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0528", "video_path": "PerceptionTest/video_3413.mp4", "video_source": "PerceptionTest", "visual_characteristic": "Low-Dynamic", "domain": "Daily Life", "duration": 20.51, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a wooden table with a pink purse on the left side of the table. In front of the purse is a black water bottle and a white medicine bottle. On the right side of the table is a pink plush hat. There is a black pen on the left side of the hat. Behind the table is a person in black pajamas. The pajamas have white buttons and are open, revealing the red lining inside. In the background is a grey wall and a blue and white cloth behind the person.\nThe person behind the table sits down. The person grabs the neckline of the pyjamas with both hands and pulls it outwards. The person then begins to fasten the first button, from the top down. This person touches the second button, but does not button it. The person continues downward and fastens the third and fourth buttons. After buttoning, the person pulls his collar with both hands and then stands up and reaches for the camera with both hands to adjust it. The camera shifts slightly.", "events": [ { "event": "The video begins with the camera focusing on a wooden table with a pink purse on the left side of the table. In front of the purse is a black water bottle and a white medicine bottle. On the right side of the table is a pink plush hat. There is a black pen on the left side of the hat. Behind the table is a person in black pajamas. The pajamas have white buttons and are open, revealing the red lining inside. 
In the background is a grey wall and a blue and white cloth behind the person.", "visual_elements": [ { "content": "The camera focuses on a wooden table.", "type": "camera", "weight": 3 }, { "content": "A pink purse is on the left side of the table.", "type": "attribute", "weight": 3 }, { "content": "In front of the purse is a black water bottle and a white medicine bottle.", "type": "attribute", "weight": 3 }, { "content": "On the right side of the table is a pink plush hat.", "type": "attribute", "weight": 3 }, { "content": "There is a black pen on the left side of the hat.", "type": "attribute", "weight": 3 }, { "content": "Behind the table is a person in black pajamas.", "type": "attribute", "weight": 3 }, { "content": "The pajamas have white buttons and are open, revealing the red lining inside.", "type": "attribute", "weight": 2 }, { "content": "In the background is a grey wall and a blue and white cloth behind the person.", "type": "attribute", "weight": 2 } ] }, { "event": "The person behind the table sits down. The person grabs the neckline of the pyjamas with both hands and pulls it outwards. The person then begins to fasten the first button, from the top down. This person touches the second button, but does not button it. The person continues downward and fastens the third and fourth buttons. After buttoning, the person pulls his collar with both hands and then stands up and reaches for the camera with both hands to adjust it. The camera shifts slightly.", "visual_elements": [ { "content": "The person behind the table sits down.", "type": "action", "weight": 2 }, { "content": "The person grabs the neckline of the pyjamas with both hands and pulls it outwards.", "type": "action", "weight": 3 }, { "content": "The person then begins to fasten the first button, from the top down.", "type": "action", "weight": 3 }, { "content": "This person touches the second button, but does not button it.", "type": "action", "weight": 3 }, { "content": "The person continues downward and fastens the third and fourth buttons.", "type": "action", "weight": 3 }, { "content": "After buttoning, the person pulls his collar with both hands.", "type": "action", "weight": 1 }, { "content": "The person then stands up and reaches for the camera with both hands to adjust it.", "type": "action", "weight": 3 }, { "content": "The camera shifts slightly.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0529", "video_path": "YouCook2/101/3rtzSsuJ4Ng/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 24.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a barbecue grill, with its lid already open. In the background, a red brick wall and windows are visible. A person dressed in a black-striped outfit appears on the right side of the picture, holding a white plate in his left hand, with several slices of raw meat placed on it.\nNext, the person uses his right hand to pick up a slice of raw meat from the plate and gently puts it on the baking tray of the barbecue grill. Then he repeats the action, taking a second slice from the plate and putting it beside the first. After that, he continues to take the third slice of raw meat from the plate and puts it on the baking tray.\nThe camera cuts to a shot of the grilling meat. It shows the meat slices gradually changing color on the grill pan. 
At this time, the person holds a black pair of tongs in his right hand and turns over the three slices of meat in turn. Each slice of meat is flipped over, revealing the already cooked part on the other side. Finally, the person pulls down the lid of the barbecue grill and closes it.\nLastly, the camera cuts to the fully cooked meat. Using the tongs, the person lifts a slice of the grilled meat and places it onto a plate lined with white parchment paper. The video stops just as the person is about to put the second slice of meat back on the plate.", "events": [ { "event": "At the beginning of the video, the camera is focused on a barbecue grill, with its lid already open. In the background, a red brick wall and windows are visible. A person dressed in a black-striped outfit appears on the right side of the picture, holding a white plate in his left hand, with several slices of raw meat placed on it.", "visual_elements": [ { "content": "The camera is focused on a barbecue grill.", "type": "camera", "weight": 3 }, { "content": "In the background, a red brick wall and windows are visible.", "type": "scene", "weight": 1 }, { "content": "A person dressed in a black-striped outfit appears on the right side of the picture.", "type": "attribute", "weight": 3 }, { "content": "The person holds a white plate in his left hand.", "type": "attribute", "weight": 3 }, { "content": "There are several slices of raw meat on the plate.", "type": "attribute", "weight": 3 } ] }, { "event": "Next, the person uses his right hand to pick up a slice of raw meat from the plate and gently puts it on the baking tray of the barbecue grill. Then he repeats the action, taking a second slice from the plate and putting it beside the first. After that, he continues to take the third slice of raw meat from the plate and puts it on the baking tray.", "visual_elements": [ { "content": "The person uses his right hand to pick up a slice of raw meat from the plate.", "type": "action", "weight": 3 }, { "content": "The person gently puts it on the baking tray of the barbecue grill.", "type": "action", "weight": 3 }, { "content": "The person takes a second slice from the plate and puts it beside the first.", "type": "action", "weight": 3 }, { "content": "The person continues to take the third slice of raw meat from the plate.", "type": "action", "weight": 3 }, { "content": "The person puts it on the baking tray.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a shot of the grilling meat. It shows the meat slices gradually changing color on the grill pan. At this time, the person holds a black pair of tongs in his right hand and turns over the three slices of meat in turn. Each slice of meat is flipped over, revealing the already cooked part on the other side. 
Finally, the person pulls down the lid of the barbecue grill and closes it.", "visual_elements": [ { "content": "The camera cuts to a shot of the grilling meat.", "type": "camera", "weight": 3 }, { "content": "It shows the meat slices gradually changing color on the grill pan.", "type": "attribute", "weight": 2 }, { "content": "The person holds a black pair of tongs in his right hand.", "type": "attribute", "weight": 2 }, { "content": "The person turns over the three slices of meat in turn.", "type": "action", "weight": 3 }, { "content": "Each slice of meat is flipped over, revealing the already cooked part on the other side.", "type": "attribute", "weight": 2 }, { "content": "The person pulls down the lid of the barbecue grill and closes it.", "type": "action", "weight": 3 } ] }, { "event": "Lastly, the camera cuts to the fully cooked meat. Using the tongs, the person lifts a slice of the grilled meat and places it onto a plate lined with white parchment paper. The video stops just as the person is about to put the second slice of meat back on the plate.", "visual_elements": [ { "content": "The camera cuts to the fully cooked meat.", "type": "camera", "weight": 3 }, { "content": "The person lifts a slice of the grilled meat by using the tongs and places it onto a plate lined with white parchment paper.", "type": "action", "weight": 3 }, { "content": "The video stops just as the person is about to put the second slice of meat back on the plate.", "type": "camera", "weight": 3 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0530", "video_path": "YouCook2/101/3rtzSsuJ4Ng/split_4.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 25.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a table with several items on it. On the left side of the picture, there is a slice of toasted bread on a plate. Under the bread is a piece of tissue paper with some oil stains on it. On the right side of the picture, there is a plate with several lettuce leaves on it. There is a pair of tongs in the lower left corner of the picture. In the middle of the table, there is a plate containing bread slices with tomatoes and meat. A person is standing behind the table, and his hands appear in the picture.\nThe person is dressed in dark clothes.  His left hand is holding a small bowl, and his right hand is holding a spoon. The small bowl contains yellow sauce. The person repeatedly scoops up several spoonfuls of sauce with the spoon and spreads it on the food in the middle of the plate. After spreading the sauce, the person puts the small bowl and spoon beside the plate on the right side of the picture. Then, with his hands, he picks up three lettuce leaves from the plate at one time and places them on the tomato slices with spread sauce.\nThen, the camera cuts to the person holding a slice of bread with both hands. The person places this slice of bread on top of the lettuce.", "events": [ { "event": "At the beginning of the video, the camera is focused on a table with several items on it. On the left side of the picture, there is a slice of toasted bread on a plate. Under the bread is a piece of tissue paper with some oil stains on it. On the right side of the picture, there is a plate with several lettuce leaves on it. There is a pair of tongs in the lower left corner of the picture. 
In the middle of the table, there is a plate containing bread slices with tomatoes and meat. A person is standing behind the table, and his hands appear in the picture.", "visual_elements": [ { "content": "The camera is focused on a table.", "type": "camera", "weight": 3 }, { "content": "On the left side of the picture, there is a slice of toasted bread on a plate. Under the bread is a piece of tissue paper with some oil stains on it.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the picture, there is a plate with several lettuce leaves on it.", "type": "attribute", "weight": 2 }, { "content": "There is a pair of tongs in the lower left corner of the picture.", "type": "attribute", "weight": 2 }, { "content": "In the middle of the table, there is a plate containing bread slices with tomatoes and meat.", "type": "attribute", "weight": 2 }, { "content": "A person is standing behind the table, and his hands appear in the picture.", "type": "attribute", "weight": 2 } ] }, { "event": "The person is dressed in dark clothes.  His left hand is holding a small bowl, and his right hand is holding a spoon. The small bowl contains yellow sauce. The person repeatedly scoops up several spoonfuls of sauce with the spoon and spreads it on the food in the middle of the plate. After spreading the sauce, the person puts the small bowl and spoon beside the plate on the right side of the picture. Then, with his hands, he picks up three lettuce leaves from the plate at one time and places them on the tomato slices with spread sauce.", "visual_elements": [ { "content": "His left hand is holding a small bowl, and his right hand is holding a spoon.", "type": "attribute", "weight": 3 }, { "content": "The small bowl contains yellow sauce.", "type": "attribute", "weight": 1 }, { "content": "The person repeatedly scoops up several spoonfuls of sauce with the spoon and spreads it on the food in the middle of the plate.", "type": "action", "weight": 3 }, { "content": "After spreading the sauce, the person puts the small bowl and spoon beside the plate on the right side of the picture.", "type": "action", "weight": 3 }, { "content": "The person picks up three lettuce leaves from the plate with his hands at one time and places them on the tomato slices with spread sauce.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera cuts to the person holding a slice of bread with both hands. The person places this slice of bread on top of the lettuce.", "visual_elements": [ { "content": "The camera cuts to the person holding a slice of bread with both hands.", "type": "camera", "weight": 3 }, { "content": "The person places this slice of bread on top of the lettuce.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0531", "video_path": "YouCook2/101/VcIF3HOI9wI/split_5.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 22.0, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a four-burner gas stove, with a black frying pan sitting on it. Inside the frying pan, there are several slices of bacon. A man, dressed in a dark short-sleeved T-shirt, appears on the scene and is putting the bacon into the frying pan with his hands. The background is a kitchen, with a green spice jar and a towel hanging next to the stove. 
With his left hand, the man lifts the bacon slices with a small knife, while his right hand flips each slice one by one. The surface of the bacon has taken on an appetizing golden hue, with some edges starting to curl up.\nAt the end of the video, the camera cuts to a slightly shaky shot. The man's fingers gently press down on the bacon. Finally, he pinches up a corner of one bacon slice with his fingers.", "events": [ { "event": "At the beginning of the video, the camera focuses on a four-burner gas stove, with a black frying pan sitting on it. Inside the frying pan, there are several slices of bacon. A man, dressed in a dark short-sleeved T-shirt, appears on the scene and is putting the bacon into the frying pan with his hands. The background is a kitchen, with a green spice jar and a towel hanging next to the stove. With his left hand, the man lifts the bacon slices with a small knife, while his right hand flips each slice one by one. The surface of the bacon has taken on an appetizing golden hue, with some edges starting to curl up.", "visual_elements": [ { "content": "The camera focuses on a four-burner gas stove.", "type": "camera", "weight": 3 }, { "content": "A black frying pan sits on the gas stove.", "type": "attribute", "weight": 3 }, { "content": "In the frying pan, there are a few slices of bacon.", "type": "attribute", "weight": 3 }, { "content": "A man appears on the scene.", "type": "attribute", "weight": 3 }, { "content": "The man is dressed in a dark short-sleeved T-shirt.", "type": "attribute", "weight": 1 }, { "content": "The man is putting the bacon into the frying pan with his hands.", "type": "action", "weight": 3 }, { "content": "The background is a kitchen, with a green spice jar and a towel hanging next to the stove.", "type": "scene", "weight": 1 }, { "content": "With his left hand, the man lifts the bacon slices with a small knife, while his right hand flips each slice one by one.", "type": "action", "weight": 3 }, { "content": "The surface of the bacon has taken on an appetizing golden hue, with some edges starting to curl up.", "type": "attribute", "weight": 2 } ] }, { "event": "At the end of the video, the camera cuts to a slightly shaky shot. The man's fingers gently press down on the bacon. Finally, he pinches up a corner of one bacon slice with his fingers.", "visual_elements": [ { "content": "the camera cuts to a slightly shaky shot.", "type": "camera", "weight": 3 }, { "content": "The man's fingers gently press down on the bacon.", "type": "action", "weight": 3 }, { "content": "The man pinches up a corner of one bacon slice with his fingers.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0532", "video_path": "YouCook2/101/VcIF3HOI9wI/split_8.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 14.0, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a wooden cutting board with several pieces of lettuce, tomato slices, a bottle of mayonnaise, and two slices of toasted bread spread with butter. A pair of hands appear in the frame.\nFirst, the hands pick up a piece of lettuce and gently place it on one of the slices of toasted bread, covering the bacon that's already there. Then, the hands pick up another piece of lettuce and put it on top of the first one, ensuring even coverage.\nNext, the hands start picking up the cut tomato slices and placing them one by one on top of the lettuce. 
They carefully arrange each slice, making sure they are evenly distributed over the lettuce. The hands continue adding tomato slices until the lettuce is covered with multiple slices.\nAfter that, the hands pick up the other slice of toasted bread and carefully place it on top of the lettuce and tomato, forming a complete sandwich. They gently press down on the sandwich.\nFinally, the hands move away from the sandwich and slowly spread open, displaying the finished sandwich.", "events": [ { "event": "At the beginning of the video, the camera focuses on a wooden cutting board with several pieces of lettuce, tomato slices, a bottle of mayonnaise, and two slices of toasted bread spread with butter. A pair of hands appear in the frame.", "visual_elements": [ { "content": "The camera focuses on a wooden cutting board.", "type": "camera", "weight": 3 }, { "content": "There are several pieces of lettuce, tomato slices, a bottle of mayonnaise, and two slices of toasted bread spread with butter on the cutting board.", "type": "scene", "weight": 3 }, { "content": "A pair of hands appear in the frame.", "type": "scene", "weight": 3 } ] }, { "event": "First, the hands pick up a piece of lettuce and gently place it on one of the slices of toasted bread, covering the bacon that's already there. Then, the hands pick up another piece of lettuce and put it on top of the first one, ensuring even coverage.", "visual_elements": [ { "content": "The hands pick up a piece of lettuce and gently place it on one of the slices of toasted bread, covering the bacon that's already there.", "type": "action", "weight": 3 }, { "content": "The hands pick up another piece of lettuce and put it on top of the first one, ensuring even coverage.", "type": "action", "weight": 3 } ] }, { "event": "Next, the hands start picking up the cut tomato slices and placing them one by one on top of the lettuce. 
They gently press down on the sandwich.", "visual_elements": [ { "content": "The hands pick up the other slice of toasted bread.", "type": "action", "weight": 3 }, { "content": "The hands carefully place it on top of the lettuce and tomato, forming a complete sandwich.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the hands move away from the sandwich and slowly spread open, displaying the finished sandwich.", "visual_elements": [ { "content": "The hands move away from the sandwich and slowly spread open, displaying the finished sandwich.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 11 }, { "index": "TUNA_0533", "video_path": "YouCook2/101/bQ3QYyFglc0/split_10.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 9.04, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a plate of sandwiches placed on a table. The top of the sandwich is covered with lettuce leaves, tomato slices, cheese slices, and some sauce. The background is a white tabletop.\nNext, an adult male hand enters the frame from the left. He picks up a large slice of bread and gently places it on top of the sandwich. Then, he reaches forward and covers the bottom of the sandwich with another slice of bread.\nAfter that, the man's hands start pressing down on the bread slices to tightly combine them with the ingredients below. Subsequently, he turns the plate halfway around, and his hands continue pressing down on the bread slices.", "events": [ { "event": "At the beginning of the video, the camera focuses on a plate of sandwiches placed on a table. The top of the sandwich is covered with lettuce leaves, tomato slices, cheese slices, and some sauce. The background is a white tabletop.", "visual_elements": [ { "content": "The camera focuses on a plate of sandwiches placed on a table.", "type": "camera", "weight": 3 }, { "content": "The top of the sandwich is covered with lettuce leaves, tomato slices, cheese slices, and some sauce.", "type": "attribute", "weight": 3 }, { "content": "The background is a white tabletop.", "type": "scene", "weight": 1 } ] }, { "event": "Next, an adult male hand enters the frame from the left. He picks up a large slice of bread and gently places it on top of the sandwich. Then, he reaches forward and covers the bottom of the sandwich with another slice of bread.", "visual_elements": [ { "content": "An adult male hand enters the frame from the left.", "type": "action", "weight": 3 }, { "content": "The man picks up a large slice of bread and gently places it on top of the sandwich.", "type": "action", "weight": 3 }, { "content": "The man reaches forward and covers the bottom of the sandwich with another slice of bread.", "type": "action", "weight": 3 } ] }, { "event": "After that, the man's hands start pressing down on the bread slices to tightly combine them with the ingredients below. 
Subsequently, he turns the plate halfway around, and his hands continue pressing down on the bread slices.", "visual_elements": [ { "content": "The man's hands start pressing down on the bread slices to tightly combine them with the ingredients below.", "type": "action", "weight": 3 }, { "content": "The man turns the plate halfway around.", "type": "action", "weight": 3 }, { "content": "The man's hands continue pressing down on the bread slices.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 9 }, { "index": "TUNA_0534", "video_path": "YouCook2/101/nbiOaHaKuKs/split_0.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 13.05, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a chicken sandwich. The close-up shot of the sandwich shows the ingredients inside such as chicken, tomatoes, and lettuce.\nNext, the picture cuts to a wooden cutting board. On the cutting board are two cooked chicken breasts. The subtitle prompts \"shred 2 large poached chicken breasts into strips\". A hand holds a fork and starts to shred the chicken breast. The fork is inserted into the chicken breast and gently pulls to shred the chicken into strips.\nThe camera cuts again. The chicken is shredded into thin strips and piled up on the cutting board. Subsequently, a small window appears in the lower right corner of the picture, showing the process of cooking chicken breasts. The window shows a pot with several chicken breasts boiling in water. The subtitle prompts \"Click here to watch how to poach chicken\".", "events": [ { "event": "At the beginning of the video, the camera focuses on a chicken sandwich. The close-up shot of the sandwich shows the ingredients inside such as chicken, tomatoes, and lettuce.", "visual_elements": [ { "content": "The camera focuses on a chicken sandwich.", "type": "camera", "weight": 3 }, { "content": "The close-up shot of the sandwich shows the ingredients inside such as chicken, tomatoes, and lettuce.", "type": "attribute", "weight": 3 } ] }, { "event": "Next, the picture cuts to a wooden cutting board. On the cutting board are two cooked chicken breasts. The subtitle prompts \"shred 2 large poached chicken breasts into strips\". A hand holds a fork and starts to shred the chicken breast. The fork is inserted into the chicken breast and gently pulls to shred the chicken into strips.", "visual_elements": [ { "content": "The picture cuts to a wooden cutting board.", "type": "camera", "weight": 3 }, { "content": "There are two cooked chicken breasts on the cutting board.", "type": "attribute", "weight": 3 }, { "content": "The subtitle prompts \"shred 2 large poached chicken breasts into strips\".", "type": "scene", "weight": 1 }, { "content": "A hand holds a fork and starts to shred the chicken breast.", "type": "action", "weight": 3 }, { "content": "The fork is inserted into the chicken breast.", "type": "attribute", "weight": 3 }, { "content": "The fork gently pulls to shred the chicken into strips.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts again. The chicken is shredded into thin strips and piled up on the cutting board. Subsequently, a small window appears in the lower right corner of the picture, showing the process of cooking chicken breasts. The window shows a pot with several chicken breasts boiling in water. 
The subtitle prompts \"Click here to watch how to poach chicken\".", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The chicken is shredded into thin strips and piled up on the cutting board.", "type": "attribute", "weight": 3 }, { "content": "A small window appears in the lower right corner of the picture, showing the process of cooking chicken breasts.", "type": "scene", "weight": 2 }, { "content": "The window shows a pot with several chicken breasts boiling in water.", "type": "scene", "weight": 2 }, { "content": "The subtitle prompts \"Click here to watch how to poach chicken\".", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0535", "video_path": "YouCook2/101/ucky1nWb7LY/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 11.01, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a kitchen counter. A woman wearing a white short-sleeved T-shirt stands in front of the counter, with a modern kitchen as the backdrop, equipped with an oven, microwave, and various kitchen utensils. On the counter, there is a sliced avocado, a knife, and a transparent glass bowl. Next to it, a baking tray holds a few slices of toasted bread.\nThe camera moves up, and the woman picks up half of the avocado with her left hand while using a spoon with her right hand to scoop out the flesh, placing it into the glass bowl. The woman talks as she scoops out the avocado. The camera moves down, showing the woman repeatedly scooping out avocado flesh into the glass bowl with the spoon.\nAt the end of the video, the camera zooms in, focusing on the avocado flesh in the glass bowl. The woman places the last piece of flesh into the bowl and sets the avocado shell aside. With her left hand, she takes the spoon, and with her right hand, she picks up a fork to flick the avocado flesh on the spoon into the bowl.", "events": [ { "event": "At the beginning of the video, the camera is focused on a kitchen counter. A woman wearing a white short-sleeved T-shirt stands in front of the counter, with a modern kitchen as the backdrop, equipped with an oven, microwave, and various kitchen utensils. On the counter, there is a sliced avocado, a knife, and a transparent glass bowl. 
Next to it, a baking tray holds a few slices of toasted bread.", "visual_elements": [ { "content": "The camera is focused on a kitchen counter.", "type": "camera", "weight": 3 }, { "content": "A woman wearing a white short-sleeved T-shirt stands in front of the counter.", "type": "attribute", "weight": 3 }, { "content": "The background is a modern kitchen equipped with an oven, microwave, and various kitchen utensils.", "type": "scene", "weight": 1 }, { "content": "On the counter, there is a wooden cutting board.", "type": "attribute", "weight": 1 }, { "content": "On the cutting board, there are two halves of a sliced avocado and a knife with a black handle.", "type": "attribute", "weight": 3 }, { "content": "On the left side of the cutting board, on the tabletop, there lies a knife with a white handle.", "type": "attribute", "weight": 2 }, { "content": "In front of the cutting board, there is a glass bowl.", "type": "attribute", "weight": 2 }, { "content": "Next to it, a baking tray holds a few slices of toasted bread.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves up, and the woman picks up half of the avocado with her left hand while using a spoon with her right hand to scoop out the flesh, placing it into the glass bowl. The woman talks as she scoops out the avocado. The camera moves down, showing the woman repeatedly scooping out avocado flesh into the glass bowl with the spoon.", "visual_elements": [ { "content": "The camera moves up.", "type": "camera", "weight": 3 }, { "content": "The woman picks up half of the avocado with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman uses a spoon with her right hand to scoop out the flesh of the avocado.", "type": "action", "weight": 3 }, { "content": "The woman talks as she scoops out the avocado.", "type": "action", "weight": 3 }, { "content": "The camera moves down.", "type": "camera", "weight": 3 }, { "content": "The woman repeatedly scoops out avocado flesh into the glass bowl with the spoon.", "type": "action", "weight": 3 } ] }, { "event": "At the end of the video, the camera zooms in, focusing on the avocado flesh in the glass bowl. The woman places the last piece of flesh into the bowl and sets the avocado shell aside. With her left hand, she takes the spoon, and with her right hand, she picks up a fork to flick the avocado flesh on the spoon into the bowl.", "visual_elements": [ { "content": "The camera zooms in, focusing on the avocado flesh in the glass bowl.", "type": "camera", "weight": 3 }, { "content": "The woman places the last piece of flesh into the bowl.", "type": "action", "weight": 3 }, { "content": "The woman sets the avocado shell aside.", "type": "action", "weight": 3 }, { "content": "The woman takes the spoon with her left hand.", "type": "action", "weight": 3 }, { "content": "The woman picks up a fork with her right hand to flick the avocado flesh on the spoon into the bowl.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0536", "video_path": "YouCook2/101/ucky1nWb7LY/split_8.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 16.02, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on the kitchen countertop, with a black oven and cupboards visible in the background. A woman wearing a white short-sleeved shirt stands in front of the countertop. 
There is a wooden cutting board on the countertop. On the countertop, there are a few slices of toasted bread with green spread already applied and sliced tomatoes. In front of the cutting board, a glass bowl containing green spread sits with a metal spoon inside. A plate beside the bowl holds several slices of ripe avocado.\nThe woman's right hand reaches forward, picking up a slice of bacon and placing it on the tomato slice on the bread. Subsequently, the camera shifts to the right and upwards as the woman continues to take bacon from a plate on the left side of the frame and put it on the tomato slices on the bread.\nThe camera switches to the bread with bacon already arranged, and begins to move backwards. Then, she picks up the sliced avocado from the plate and places it on the slice of bread without bacon. The camera gradually moves forward, capturing the woman repeatedly placing the avocado slices neatly on the bread.", "events": [ { "event": "At the beginning of the video, the camera focuses on the kitchen countertop, with a black oven and cupboards visible in the background. A woman wearing a white short-sleeved shirt stands in front of the countertop. There is a wooden cutting board on the countertop. On the countertop, there are a few slices of toasted bread with green spread already applied and sliced tomatoes. In front of the cutting board, a glass bowl containing green spread sits with a metal spoon inside. A plate beside the bowl holds several slices of ripe avocado.", "visual_elements": [ { "content": "The camera focuses on the kitchen countertop.", "type": "camera", "weight": 3 }, { "content": "A black oven and cupboards are visible in the background.", "type": "scene", "weight": 1 }, { "content": "A woman wearing a white short-sleeved shirt stands in front of the countertop.", "type": "attribute", "weight": 3 }, { "content": "There is a wooden cutting board on the countertop.", "type": "attribute", "weight": 2 }, { "content": "On the countertop, there are a few slices of toasted bread with green spread already applied and sliced tomatoes.", "type": "attribute", "weight": 3 }, { "content": "In front of the cutting board, a glass bowl containing green spread sits with a metal spoon inside.", "type": "attribute", "weight": 2 }, { "content": "A plate beside the bowl holds several slices of ripe avocado.", "type": "attribute", "weight": 2 } ] }, { "event": "The woman's right hand reaches forward, picking up a slice of bacon and placing it on the tomato slice on the bread. Subsequently, the camera shifts to the right and upwards as the woman continues to take bacon from a plate on the left side of the frame and put it on the tomato slices on the bread.", "visual_elements": [ { "content": "The woman's right hand reaches forward.", "type": "action", "weight": 3 }, { "content": "The woman picks up a slice of bacon and places it on the tomato slice on the bread.", "type": "action", "weight": 3 }, { "content": "The camera shifts to the right and upwards.", "type": "action", "weight": 3 }, { "content": "The woman continues to take bacon from a plate on the left side of the frame and put it on the tomato slices on the bread.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to the bread with bacon already arranged, and begins to move backwards. Then, she picks up the sliced avocado from the plate and places it on the slice of bread without bacon. 
The camera gradually moves forward, capturing the woman repeatedly placing the avocado slices neatly on the bread.", "visual_elements": [ { "content": "The camera switches to the bread with bacon already arranged.", "type": "camera", "weight": 3 }, { "content": "The camera begins to move backwards.", "type": "camera", "weight": 3 }, { "content": "The woman picks up the sliced avocado from the plate and places it on the slice of bread without bacon.", "type": "action", "weight": 3 }, { "content": "The camera gradually moves forward.", "type": "camera", "weight": 3 }, { "content": "The woman repeats the action of neatly placing avocado slices on the bread.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0537", "video_path": "YouCook2/102/0Mz4NTozNXw/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 10.04, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an empty circular baking dish placed on a wooden cutting board. Suddenly, a silver bag appears above the frame, tilting and pouring rice grains into the baking dish continuously. After the bag stops pouring, it exits the frame. The rice grains in the baking dish form a small hill. A hand enters from the left side of the frame, gripping the side of the baking dish and gently shaking it, spreading the rice grains evenly across the dish.\nThe camera then switches to a whole onion placed on the same wooden cutting board, with a knife visible in the background. The top and bottom of the onion have been cut off, revealing a smooth white surface.", "events": [ { "event": "At the beginning of the video, the camera focuses on an empty circular baking dish placed on a wooden cutting board. Suddenly, a silver bag appears above the frame, tilting and pouring rice grains into the baking dish continuously. After the bag stops pouring, it exits the frame. The rice grains in the baking dish form a small hill. A hand enters from the left side of the frame, gripping the side of the baking dish and gently shaking it, spreading the rice grains evenly across the dish.", "visual_elements": [ { "content": "The camera focuses on an empty circular baking dish.", "type": "camera", "weight": 3 }, { "content": "The baking dish is placed on a wooden cutting board.", "type": "scene", "weight": 3 }, { "content": "A silver bag appears above the frame suddenly.", "type": "attribute", "weight": 2 }, { "content": "The bag is tilted and the grains of rice inside fall continuously into the baking dish.", "type": "action", "weight": 3 }, { "content": "After the bag stops pouring, it exits the frame.", "type": "action", "weight": 2 }, { "content": "The rice grains in the baking dish form a small hill.", "type": "attribute", "weight": 3 }, { "content": "A hand enters from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The hand grips the side of the baking dish and gently shakes it, spreading the rice grains evenly across the dish.", "type": "action", "weight": 3 } ] }, { "event": "The camera then switches to a whole onion placed on the same wooden cutting board, with a knife visible in the background. 
The top and bottom of the onion have been cut off, revealing a smooth white surface.", "visual_elements": [ { "content": "The camera then switches to a whole onion.", "type": "camera", "weight": 3 }, { "content": "The onion is placed on the same wooden cutting board.", "type": "attribute", "weight": 3 }, { "content": "A knife is visible in the background.", "type": "scene", "weight": 1 }, { "content": "The top and bottom of the onion have been cut off, revealing a smooth white surface.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0538", "video_path": "YouCook2/102/4B6j3gYkvr4/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 11.01, "resolution": { "width": 1920, "height": 1080 }, "caption": "In the video, a woman is cooking in the kitchen. She wears a purple long-sleeved shirt and a gray vest, with a white apron tied around her waist. She stands at the kitchen countertop, where two transparent glass bowls are placed, the left one containing flour and sliced onion rings.\nIn the background of the kitchen, cupboards, a microwave, and a stove are visible. There is a red pot on the stove filled with hot oil. The countertop also holds some kitchen utensils, such as knives, spoons, and a vase.\nIn the video, the woman places the flour-coated onion rings into the hot oil in the red pot for frying. The camera zooms in and moves to the left, revealing the boiling oil and the onion rings flipping in it. She continuously repeats this action, placing more onion rings into the oil, while talking with her head down. Finally, the camera moves to the right.", "events": [ { "event": "In the video, a woman is cooking in the kitchen. She wears a purple long-sleeved shirt and a gray vest, with a white apron tied around her waist. She stands at the kitchen countertop, where two transparent glass bowls are placed, the left one containing flour and sliced onion rings.", "visual_elements": [ { "content": "A woman is cooking in the kitchen.", "type": "scene", "weight": 3 }, { "content": "The woman wears a purple long-sleeved shirt and a gray vest, with a white apron tied around her waist.", "type": "attribute", "weight": 1 }, { "content": "The woman stands at the kitchen countertop.", "type": "attribute", "weight": 3 }, { "content": "There are two transparent glass bowls on the countertop, and the left one contains flour and sliced onion rings.", "type": "attribute", "weight": 3 } ] }, { "event": "In the background of the kitchen, cupboards, a microwave, and a stove are visible. There is a red pot on the stove filled with hot oil. The countertop also holds some kitchen utensils, such as knives, spoons, and a vase.", "visual_elements": [ { "content": "In the background of the kitchen, cupboards, a microwave, and a stove are visible.", "type": "scene", "weight": 1 }, { "content": "On the left side of the screen, there is a red pot on the stove filled with hot oil.", "type": "attribute", "weight": 2 }, { "content": "The countertop behind the woman also holds some kitchen utensils, such as knives, spoons, and a vase.", "type": "scene", "weight": 1 } ] }, { "event": "In the video, the woman places the flour-coated onion rings into the hot oil in the red pot for frying. The camera zooms in and moves to the left, revealing the boiling oil and the onion rings flipping in it. She continuously repeats this action, placing more onion rings into the oil, while talking with her head down. 
Finally, the camera moves to the right.", "visual_elements": [ { "content": "The woman places the flour-coated onion rings into the hot oil in the red pot for frying.", "type": "action", "weight": 3 }, { "content": "The camera zooms in and moves to the left.", "type": "camera", "weight": 3 }, { "content": "The oil in the pot is boiling and the onion rings are tumbling in the oil.", "type": "attribute", "weight": 3 }, { "content": "The woman continuously repeats this action, placing more onion rings into the oil.", "type": "action", "weight": 3 }, { "content": "The woman talks with her head down while doing these actions.", "type": "action", "weight": 2 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0539", "video_path": "YouCook2/102/EP2OBrPPWtU/split_7.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 15.06, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a pot filled with oil. The bottom of the pot has some discoloration and spots. A hand continuously places four onion rings into the boiling oil in the pot, where the hot oil bubbles around the onion rings.\nSubsequently, the camera zooms in to show a hand holding a pair of tongs, which are used to flip the onion rings in the oil.\nThen, the scene shifts to a new setting and slowly zooms in to reveal a wooden cutting board with a stack of fried onion rings on it. In the background, there are two whole onions visible.", "events": [ { "event": "The video begins with the camera focused on a pot filled with oil. The bottom of the pot has some discoloration and spots. A hand continuously places four onion rings into the boiling oil in the pot, where the hot oil bubbles around the onion rings.", "visual_elements": [ { "content": "The camera is focused on a pot filled with oil.", "type": "camera", "weight": 3 }, { "content": "The bottom of the pot has some discoloration and spots.", "type": "attribute", "weight": 1 }, { "content": "A hand continuously places four onion rings into the boiling oil in the pot.", "type": "action", "weight": 3 }, { "content": "The hot oil in the oil pot is boiling. There are small bubbles of hot oil around the onion rings.", "type": "attribute", "weight": 3 } ] }, { "event": "Subsequently, the camera zooms in to show a hand holding a pair of tongs, which are used to flip the onion rings in the oil.", "visual_elements": [ { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "One hand holds a pair of tongs to flip the onion rings in the pot.", "type": "action", "weight": 3 } ] }, { "event": "Then, the scene shifts to a new setting and slowly zooms in to reveal a wooden cutting board with a stack of fried onion rings on it. 
In the background, there are two whole onions visible.", "visual_elements": [ { "content": "The scene shifts to a new setting and slowly zooms in.", "type": "camera", "weight": 3 }, { "content": "The screen shows a wooden cutting board.", "type": "attribute", "weight": 2 }, { "content": "There is a stack of fried onion rings on the cutting board.", "type": "attribute", "weight": 3 }, { "content": "In the background, there are two whole onions visible.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 10 }, { "index": "TUNA_0540", "video_path": "YouCook2/102/dmJB7NHZ2cI/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 20.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a kitchen scene. The background of the kitchen features dark wooden cabinets and white walls, with a microwave built into the cabinets. A woman stands at the kitchen counter, wearing a white short-sleeved shirt with black patterns, her hair tied up in a ponytail. She holds a white bowl in both hands. Then she reaches out her right hand to a glass bowl on a nearby table.\nThe camera switches to a close-up shot of the cutting board on the counter. There are some sliced onion rings and onion chunks on the cutting board, along with a transparent glass bowl. The woman's hand holds a small white bowl, pouring a white powder into the glass bowl. A pink knife can be seen nearby. Next, the camera continues with a closer detail shot, showing more white powder being poured into the glass bowl.\nThe camera switches again to an even closer detail shot. Some red powder is sprinkled into the glass bowl, and more white powder and granular substances are poured in. A hand holds some beige powder and puts it into the glass bowl.", "events": [ { "event": "At the beginning of the video, the camera focuses on a kitchen scene. The background of the kitchen features dark wooden cabinets and white walls, with a microwave built into the cabinets. A woman stands at the kitchen counter, wearing a white short-sleeved shirt with black patterns, her hair tied up in a ponytail. She holds a white bowl in both hands. Then she reaches out her right hand to a glass bowl on a nearby table.", "visual_elements": [ { "content": "The camera focuses on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "The background of the kitchen features dark wooden cabinets and white walls, with a microwave built into the cabinets.", "type": "scene", "weight": 1 }, { "content": "A woman stands at the kitchen counter, wearing a white short-sleeved shirt with black patterns, her hair tied up in a ponytail.", "type": "attribute", "weight": 3 }, { "content": "The woman holds a white bowl in both hands.", "type": "attribute", "weight": 3 }, { "content": "The woman reaches out her right hand to a glass bowl on a nearby table.", "type": "action", "weight": 1 } ] }, { "event": "The camera switches to a close-up shot of the cutting board on the counter. There are some sliced onion rings and onion chunks on the cutting board, along with a transparent glass bowl. The woman's hand holds a small white bowl, pouring a white powder into the glass bowl. A pink knife can be seen nearby. 
Next, the camera continues with a closer detail shot, showing more white powder being poured into the glass bowl.", "visual_elements": [ { "content": "The camera switches to a close-up shot of the cutting board on the counter.", "type": "camera", "weight": 3 }, { "content": "There are some sliced onion rings and onion chunks on the cutting board, along with a transparent glass bowl.", "type": "attribute", "weight": 3 }, { "content": "The woman's hand holds a small white bowl, pouring a white powder into the glass bowl.", "type": "action", "weight": 3 }, { "content": "A pink knife can be seen nearby.", "type": "scene", "weight": 1 }, { "content": "The woman re-enters the frame holding the small white bowl again, pouring the remaining white powder from the bowl into the glass bowl.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches again to an even closer detail shot. Some red powder is sprinkled into the glass bowl, and more white powder and granular substances are poured in. A hand holds some beige powder and puts it into the glass bowl.", "visual_elements": [ { "content": "The camera switches again to an even closer detail shot.", "type": "camera", "weight": 3 }, { "content": "Some red powder is sprinkled into the glass bowl.", "type": "attribute", "weight": 2 }, { "content": "More white powder and granular substances are poured into the glass bowl.", "type": "scene", "weight": 3 }, { "content": "A hand holds some beige powder and puts it into the glass bowl.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0541", "video_path": "YouCook2/102/dmJB7NHZ2cI/split_6.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 9.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a black skillet with some oil inside. A hand hovers above the skillet, feeling the temperature of the oil.\nSubsequently, the camera switches to a close-up shot of a plate of fried onion rings. The onion rings are golden brown, with some covered in breadcrumbs and others having a smooth batter coating. The onion rings are neatly arranged on a white oil-absorbing paper placed on a wooden cutting board. Then, some white particles are sprinkled over the onion rings.", "events": [ { "event": "At the beginning of the video, the camera focuses on a black skillet with some oil inside. A hand hovers above the skillet, feeling the temperature of the oil.", "visual_elements": [ { "content": "The camera focuses on a black skillet.", "type": "camera", "weight": 3 }, { "content": "There is some oil in the skillet.", "type": "attribute", "weight": 2 }, { "content": "A hand hovers above the skillet, feeling the temperature of the oil.", "type": "action", "weight": 3 } ] }, { "event": "Subsequently, the camera switches to a close-up shot of a plate of fried onion rings. The onion rings are golden brown, with some covered in breadcrumbs and others having a smooth batter coating. The onion rings are neatly arranged on a white oil-absorbing paper placed on a wooden cutting board. 
Then, some white particles are sprinkled over the onion rings.", "visual_elements": [ { "content": "The camera switches to a close-up shot of a plate of fried onion rings.", "type": "camera", "weight": 3 }, { "content": "The onion rings are golden brown, with some covered in breadcrumbs and others having a smooth batter coating.", "type": "attribute", "weight": 3 }, { "content": "The onion rings are neatly arranged on a white oil-absorbing paper placed on a wooden cutting board.", "type": "attribute", "weight": 2 }, { "content": "Some white particles are sprinkled over the onion rings.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 7 }, { "index": "TUNA_0542", "video_path": "YouCook2/102/r1HcLnSTJHY/split_5.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 7.0, "resolution": { "width": 1280, "height": 720 }, "caption": "The video starts with a shot of a metal rack with several freshly fried onion rings placed on it. The onion rings are golden brown and crispy on the surface. A hand reaches down from above the frame, holding a metal spoon, and lifts the onion rings out of the nearby pot.\nNext, the camera switches to a close-up shot of a hand holding a golden onion ring against a blurred background.\nThen, the camera switches again, this time showing a wooden cutting board with several onion rings, a few carrots, and a small bowl of dipping sauce placed on it. A pair of hands reaches in from the right side of the frame and places two onion rings on the cutting board.\nSubsequently, the camera switches to a close-up shot of the onion rings on the cutting board. One of the onion rings wobbles.\nFinally, the camera switches to a close-up shot of a hand placing green leaves on top of the onion rings.", "events": [ { "event": "The video starts with a shot of a metal rack with several freshly fried onion rings placed on it. The onion rings are golden brown and crispy on the surface. A hand reaches down from above the frame, holding a metal spoon, and lifts the onion rings out of the nearby pot.", "visual_elements": [ { "content": "The camera is focused on a metal wire rack.", "type": "camera", "weight": 3 }, { "content": "There are several freshly fried onion rings placed on the wire rack.", "type": "attribute", "weight": 2 }, { "content": "The onion rings are golden brown and crispy on the surface.", "type": "attribute", "weight": 2 }, { "content": "A hand reaches down from above the frame.", "type": "action", "weight": 2 }, { "content": "The hand holds a metal spoon and lifts the onion rings out of the nearby pot.", "type": "action", "weight": 1 } ] }, { "event": "Next, the camera switches to a close-up shot of a hand holding a golden onion ring against a blurred background.", "visual_elements": [ { "content": "The camera switches to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "A hand holds a golden onion ring.", "type": "attribute", "weight": 3 }, { "content": "The background is blurred.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches again, this time showing a wooden cutting board with several onion rings, a few carrots, and a small bowl of dipping sauce placed on it. 
A pair of hands reaches in from the right side of the frame and places two onion rings on the cutting board.", "visual_elements": [ { "content": "The camera switches again, this time showing a wooden cutting board.", "type": "camera", "weight": 3 }, { "content": "There are several onion rings, a few carrots, and a small bowl of dipping sauce placed on the wooden cutting board.", "type": "attribute", "weight": 3 }, { "content": "A pair of hands reaches in from the right side of the frame and places two onion rings on the cutting board.", "type": "action", "weight": 3 } ] }, { "event": "Subsequently, the camera switches to a close-up shot of the onion rings on the cutting board. One of the onion rings wobbles.", "visual_elements": [ { "content": "The camera switches to a close-up shot of the onion rings on the cutting board.", "type": "camera", "weight": 3 }, { "content": "One of the onion rings on it wobbles.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the camera switches to a close-up shot of a hand placing green leaves on top of the onion rings.", "visual_elements": [ { "content": "The camera switches to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "A hand places green leaves on top of the onion rings.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 15 }, { "index": "TUNA_0543", "video_path": "YouCook2/102/x41ze5sLnBY/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 18.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a glass bowl containing a white powdery substance. In the background, some sliced onion rings can be seen placed on a metal wire rack.\nThen, a hand appears in the top right corner of the frame, holding a pink measuring cup, pouring more white powdery substance into the glass bowl. Afterward, the hand taps the pink measuring cup on the edge of the glass bowl.\nNext, the camera zooms in closer. A metal spoon enters the frame from the top left corner, adding some white powder to the bowl. Subsequently, the spoon also adds green spices to the glass bowl. A metal whisk enters the frame from above and begins stirring the powdery substance and spices in the bowl.\nAfterward, the camera switches, showing that the white powder and green spices in the glass bowl have already been mixed. The whisk continues stirring the contents in the bowl, tapping it on the edge of the glass bowl.", "events": [ { "event": "At the beginning of the video, the camera focuses on a glass bowl containing a white powdery substance. In the background, some sliced onion rings can be seen placed on a metal wire rack.", "visual_elements": [ { "content": "The camera focuses on a glass bowl.", "type": "camera", "weight": 3 }, { "content": "There is a white powdery substance in the bowl.", "type": "attribute", "weight": 3 }, { "content": "In the background, some sliced onion rings can be seen placed on a metal wire rack.", "type": "scene", "weight": 1 } ] }, { "event": "Then, a hand appears in the top right corner of the frame, holding a pink measuring cup, pouring more white powdery substance into the glass bowl. 
Afterward, the hand taps the pink measuring cup on the edge of the glass bowl.", "visual_elements": [ { "content": "A hand appears in the top right corner of the frame.", "type": "action", "weight": 3 }, { "content": "A hand holds a pink measuring cup.", "type": "action", "weight": 3 }, { "content": "The hand pours more white powdery substance into the glass bowl.", "type": "action", "weight": 3 }, { "content": "The hand taps the pink measuring cup on the edge of the glass bowl.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera zooms in closer. A metal spoon enters the frame from the top left corner, adding some white powder to the bowl. Subsequently, the spoon also adds green spices to the glass bowl. A metal whisk enters the frame from above and begins stirring the powdery substance and spices in the bowl.", "visual_elements": [ { "content": "The camera zooms in closer.", "type": "camera", "weight": 3 }, { "content": "A metal spoon enters the frame from the top left corner.", "type": "action", "weight": 3 }, { "content": "Some white powder is added to the bowl by the spoon.", "type": "action", "weight": 3 }, { "content": "Green spices are added to the glass bowl by the spoon.", "type": "action", "weight": 3 }, { "content": "A metal whisk enters the frame from above.", "type": "action", "weight": 3 }, { "content": "The whisk begins stirring the powdery substance and spices in the bowl.", "type": "action", "weight": 3 } ] }, { "event": "Afterward, the camera switches, showing that the white powder and green spices in the glass bowl have already been mixed. The whisk continues stirring the contents in the bowl, tapping it on the edge of the glass bowl.", "visual_elements": [ { "content": "The camera switches.", "type": "camera", "weight": 3 }, { "content": "The white powder and green spices in the glass bowl have already been mixed.", "type": "attribute", "weight": 3 }, { "content": "The whisk continues stirring the contents in the bowl.", "type": "action", "weight": 3 }, { "content": "The whisk taps on the edge of the glass bowl.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 17 }, { "index": "TUNA_0544", "video_path": "YouCook2/102/xK8bGXuXO_Y/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 6.04, "resolution": { "width": 654, "height": 480 }, "caption": "At the beginning of the video, the camera focuses on a kitchen scene. The kitchen is bright and tidy, with green plants and windows in the background. Outside the window, some trees and sunlight can be seen. In the center of the kitchen is a wide countertop with various cooking utensils and ingredients placed on it. There is a knife block on the countertop, with several knives inserted, as well as bowls, measuring cups, and a red skillet.\nA middle-aged woman stands behind the countertop, wearing a light blue apron over her pink traditional attire. She wears a watch on her left hand and a bracelet on her right hand.\nThe woman reaches out her left hand to take the sliced onion rings in a glass bowl and speaks to the camera. 
Subsequently, she uses both hands to separate the onion rings that are stuck together, then looks up at the camera and speaks, shaking the onion rings in her hand.\nNext, the camera switches to a close-up shot of the woman's right hand holding an onion ring and flipping it in a glass bowl filled with batter.", "events": [ { "event": "At the beginning of the video, the camera focuses on a kitchen scene. The kitchen is bright and tidy, with green plants and windows in the background. Outside the window, some trees and sunlight can be seen. In the center of the kitchen is a wide countertop with various cooking utensils and ingredients placed on it. There is a knife block on the countertop, with several knives inserted, as well as bowls, measuring cups, and a red skillet.\nA middle-aged woman stands behind the countertop, wearing a light blue apron over her pink traditional attire. She wears a watch on her left hand and a bracelet on her right hand.", "visual_elements": [ { "content": "The camera focuses on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "The kitchen is bright and tidy, with green plants and windows in the background. Outside the window, some trees and sunlight can be seen.", "type": "scene", "weight": 2 }, { "content": "In the center of the kitchen is a wide countertop with various cooking utensils and ingredients placed on it.", "type": "attribute", "weight": 2 }, { "content": "There is a knife block on the countertop, with several knives inserted, as well as bowls, measuring cups, and a red skillet.", "type": "attribute", "weight": 2 }, { "content": "A middle-aged woman stands behind the countertop.", "type": "attribute", "weight": 3 }, { "content": "The woman wears a light blue apron over her pink traditional attire.", "type": "attribute", "weight": 2 }, { "content": "The woman wears a watch on her left hand and a bracelet on her right hand.", "type": "attribute", "weight": 1 } ] }, { "event": "The woman reaches out her left hand to take the sliced onion rings in a glass bowl and speaks to the camera. 
Subsequently, she uses both hands to separate the onion rings that are stuck together, then looks up at the camera and speaks, shaking the onion rings in her hand.", "visual_elements": [ { "content": "The woman reaches out her left hand to take the sliced onion rings in a glass bowl.", "type": "action", "weight": 3 }, { "content": "The woman speaks to the camera.", "type": "action", "weight": 3 }, { "content": "The woman uses both hands to separate the onion rings that are stuck together.", "type": "action", "weight": 3 }, { "content": "The woman looks up at the camera and speaks.", "type": "action", "weight": 3 }, { "content": "The woman shakes the onion rings in her hand.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to a close-up shot of the woman's right hand holding an onion ring and flipping it in a glass bowl filled with batter.", "visual_elements": [ { "content": "The camera switches to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "The woman's right hand holds an onion ring and flips it in a glass bowl filled with batter.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0545", "video_path": "YouCook2/102/z1S8WkUoQtU/split_0.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 12.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "The camera focuses on the edge of a transparent glass bowl. In the background, a person wearing a red outfit can be seen. The camera moves down, revealing that the person's left hand holds a small white bowl, and his right hand uses a spoon to pour several spoonfuls of flour into the glass bowl.\nThe camera switches. The person continues using the spoon to add flour to the glass bowl, and his left hand empties the flour from the white bowl into the glass bowl. In the background, some sliced onion rings can be seen placed on a wooden cutting board.\nThe camera switches again, revealing that the person wearing the red outfit is a woman. The woman prepares the ingredients while speaking to the camera.\nSubsequently, the camera switches back to the glass bowl and rotates to the left. The woman's left hand holds a white bowl, and her right hand uses a spoon to scoop two spoonfuls of flour from the white bowl into the glass bowl. Then, the woman puts down the spoon and places the white bowl in front of the camera.\nFinally, the camera switches to a shot of the woman's upper torso. She looks at the camera and speaks.", "events": [ { "event": "The camera focuses on the edge of a transparent glass bowl. In the background, a person wearing a red outfit can be seen. The camera moves down, revealing that the person's left hand holds a small white bowl, and his right hand uses a spoon to pour several spoonfuls of flour into the glass bowl.", "visual_elements": [ { "content": "The camera focuses on the edge of a transparent glass bowl.", "type": "camera", "weight": 3 }, { "content": "In the background, a person wearing a red outfit can be seen.", "type": "attribute", "weight": 2 }, { "content": "The camera moves down.", "type": "camera", "weight": 3 }, { "content": "The person's left hand holds a small white bowl.", "type": "action", "weight": 2 }, { "content": "His right hand uses a spoon to pour several spoonfuls of flour into the glass bowl.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches. 
The person continues using the spoon to add flour to the glass bowl, and his left hand empties the flour from the white bowl into the glass bowl. In the background, some sliced onion rings can be seen placed on a wooden cutting board.", "visual_elements": [ { "content": "The camera switches.", "type": "camera", "weight": 3 }, { "content": "The person continues using the spoon to add flour to the glass bowl.", "type": "action", "weight": 3 }, { "content": "The person's left hand empties the flour from the white bowl into the glass bowl.", "type": "action", "weight": 3 }, { "content": "In the background, some sliced onion rings can be seen placed on a wooden cutting board.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches again, revealing that the person wearing the red outfit is a woman. The woman prepares the ingredients while speaking to the camera.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "The person wearing the red outfit is a woman.", "type": "attribute", "weight": 3 }, { "content": "The woman prepares the ingredients while speaking to the camera.", "type": "action", "weight": 3 } ] }, { "event": "Subsequently, the camera switches back to the glass bowl and rotates to the left. The woman's left hand holds a white bowl, and her right hand uses a spoon to scoop two spoonfuls of flour from the white bowl into the glass bowl. Then, the woman puts down the spoon and places the white bowl in front of the camera.", "visual_elements": [ { "content": "The camera switches back to the glass bowl and rotates to the left.", "type": "camera", "weight": 3 }, { "content": "The woman's left hand holds a white bowl.", "type": "attribute", "weight": 2 }, { "content": "The woman's right hand uses a spoon to scoop two spoonfuls of flour from the white bowl into the glass bowl.", "type": "action", "weight": 3 }, { "content": "The woman puts down the spoon and bowl.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera switches to a shot of the woman's upper torso. She looks at the camera and speaks.", "visual_elements": [ { "content": "The camera switches to a shot of the woman's upper torso.", "type": "camera", "weight": 3 }, { "content": "The woman looks at the camera and speaks.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 18 }, { "index": "TUNA_0546", "video_path": "YouCook2/103/H_9oM1Y60lU/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 10.04, "resolution": { "width": 640, "height": 360 }, "caption": "In the video, two men are conducting a cooking demonstration in the kitchen. The man on the left wears a white chef's coat with a black tie, while the man on the right wears a dark-colored shirt with a white apron tied around his waist. They stand in front of a spacious kitchen countertop, which is laden with various ingredients and cooking tools. The man on the left holds a whisk in his right hand, stirring the contents of a pot while supporting the pot handle with his left hand. The man on the right speaks while pouring ingredients from a bowl he holds in his left hand into the pot.\nThe camera switches to a close-up of the pot, where the boiling white liquid is emitting steam. One person grabs ingredients and adds yellow, stringy ones into the pot, while the other continuously stirs the contents with a whisk. 
The person adding ingredients to the pot then switches to a different ingredient and adds it to the pot.", "events": [ { "event": "In the video, two men are conducting a cooking demonstration in the kitchen. The man on the left wears a white chef's coat with a black tie, while the man on the right wears a dark-colored shirt with a white apron tied around his waist. They stand in front of a spacious kitchen countertop, which is laden with various ingredients and cooking tools. The man on the left holds a whisk in his right hand, stirring the contents of a pot while supporting the pot handle with his left hand. The man on the right speaks while pouring ingredients from a bowl he holds in his left hand into the pot.", "visual_elements": [ { "content": "Two men are conducting a cooking demonstration in the kitchen.", "type": "camera", "weight": 3 }, { "content": "The man on the left wears a white chef's coat with a black tie.", "type": "attribute", "weight": 1 }, { "content": "The man on the right wears a dark-colored shirt with a white apron tied around his waist.", "type": "attribute", "weight": 1 }, { "content": "The two men stand in front of a spacious kitchen countertop.", "type": "attribute", "weight": 2 }, { "content": "There are various ingredients and cooking tools placed on the countertop.", "type": "scene", "weight": 1 }, { "content": "The man on the left holds a whisk in his right hand, stirring the contents of a pot.", "type": "action", "weight": 3 }, { "content": "The man on the left supports the pot handle with his left hand.", "type": "action", "weight": 3 }, { "content": "The man on the right speaks.", "type": "action", "weight": 3 }, { "content": "The man on the right pours ingredients from a bowl he holds in his left hand into the pot.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to a close-up of the pot, where the boiling white liquid is emitting steam. One person grabs ingredients and adds yellow, stringy ones into the pot, while the other continuously stirs the contents with a whisk. The person adding ingredients to the pot then switches to a different ingredient and adds it to the pot.", "visual_elements": [ { "content": "The camera switches to a close-up of the pot.", "type": "camera", "weight": 3 }, { "content": "The white liquid boiling in the pot is constantly emitting steam.", "type": "attribute", "weight": 2 }, { "content": "One person grabs ingredients and adds yellow, stringy ones into the pot.", "type": "action", "weight": 3 }, { "content": "One person continuously stirs the contents with a whisk.", "type": "action", "weight": 3 }, { "content": "The person adding ingredients to the pot switches to a different ingredient and adds it to the pot.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0547", "video_path": "YouCook2/103/P8SY69LYkx0/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 19.05, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a wooden workbench with a transparent glass bowl placed on it, containing red meat filling. A person wearing a white chef's coat and white gloves stands nearby. A metal ring mold rests on a plastic bag beside it.\nThe chef grabs a handful of meat filling with his left hand and presses it a few times in his palm. Then, with his left hand, he moves the plastic bag next to the bowl, and with his right hand, he passes the meat filling to his left hand. 
He places the meat filling into the mold and presses it down a few times. Next, he reaches into the glass bowl with both hands, grabs some more meat filling with his left hand, and places it into the mold. Immediately afterward, he holds the mold with his right hand and repeatedly compresses the meat filling inside with his left hand.\nFinally, the chef lifts the mold, flips it over, and places it on the plastic bag. Then, with his right hand supporting the mold, he removes and lifts it off with his left hand.", "events": [ { "event": "At the beginning of the video, the camera focuses on a wooden workbench with a transparent glass bowl placed on it, containing red meat filling. A person wearing a white chef's coat and white gloves stands nearby. A metal ring mold rests on a plastic bag beside it.", "visual_elements": [ { "content": "The camera focuses on a wooden workbench.", "type": "camera", "weight": 3 }, { "content": "There is a transparent glass bowl placed on the workbench.", "type": "scene", "weight": 2 }, { "content": "There is red meat filling in the bowl.", "type": "attribute", "weight": 2 }, { "content": "A person wears a white chef's coat and white gloves.", "type": "attribute", "weight": 1 }, { "content": "Next to it, a metal ring mold rests on a plastic bag.", "type": "scene", "weight": 2 } ] }, { "event": "The chef grabs a handful of meat filling with his left hand and presses it a few times in his palm. Then, with his left hand, he moves the plastic bag next to the bowl, and with his right hand, he passes the meat filling to his left hand. He places the meat filling into the mold and presses it down a few times. Next, he reaches into the glass bowl with both hands, grabs some more meat filling with his left hand, and places it into the mold. Immediately afterward, he holds the mold with his right hand and repeatedly compresses the meat filling inside with his left hand.", "visual_elements": [ { "content": "The chef grabs a handful of meat filling with his left hand.", "type": "action", "weight": 3 }, { "content": "The chef presses it a few times in his palm.", "type": "action", "weight": 3 }, { "content": "The chef moves the plastic bag next to the bowl with his left hand.", "type": "action", "weight": 3 }, { "content": "The chef passes the meat filling from his right hand to his left hand.", "type": "action", "weight": 2 }, { "content": "The chef places the meat filling into the mold and presses it down a few times.", "type": "action", "weight": 3 }, { "content": "The chef reaches into the glass bowl with both hands.", "type": "action", "weight": 2 }, { "content": "The chef grabs some more meat filling with his left hand and places it into the mold.", "type": "action", "weight": 3 }, { "content": "The chef holds the mold with his right hand.", "type": "action", "weight": 3 }, { "content": "The chef repeatedly compresses the meat filling inside with his left hand.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the chef lifts the mold, flips it over, and places it on the plastic bag. 
Then, with his right hand supporting the mold, he removes and lifts it off with his left hand.", "visual_elements": [ { "content": "The chef lifts the mold, flips it over, and places it on the plastic bag.", "type": "action", "weight": 3 }, { "content": "The chef removes the mold with his left hand and lifts it off.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0548", "video_path": "YouCook2/103/ho2JlRJp9rk/split_5.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 13.05, "resolution": { "width": 1920, "height": 1080 }, "caption": "In the video, a man is cooking outdoors. He wears a black chef's uniform with the words \"Ballistic BBQ\" and \"Greg\" printed on it, and a black hat. The background is a lush green yard surrounded by wooden fences and some plants.\nAt the start of the video, the camera focuses on a large grill plate with a hamburger patty on it, topped with a slice of melted yellow cheese. On another part of the grill plate nearby, a pot filled with bubbling oil sits.\nNext, the camera switches to the male chef, who grabs two slices of bread from his right side, glances at the camera, and then begins cooking while looking down. He talks as he cooks. The camera then cuts back to the grill plate, where the chef places the two halves of the hamburger bun on the grill plate to toast.\nThe scene flips. The chef uses a spatula to flip the bread on the left side of the iron plate and places it beside the grill plate. Finally, the male chef grabs the spatula, preparing to flip the other slice of bread.", "events": [ { "event": "In the video, a man is cooking outdoors. He wears a black chef's uniform with the words \"Ballistic BBQ\" and \"Greg\" printed on it, and a black hat. The background is a lush green yard surrounded by wooden fences and some plants.", "visual_elements": [ { "content": "A man is cooking outdoors.", "type": "scene", "weight": 3 }, { "content": "The man wears a black chef's uniform with the words \"Ballistic BBQ\" and \"Greg\" printed on it, and a black hat.", "type": "attribute", "weight": 2 }, { "content": "The background is a lush green yard surrounded by wooden fences and some plants.", "type": "scene", "weight": 1 } ] }, { "event": "At the start of the video, the camera focuses on a large grill plate with a hamburger patty on it, topped with a slice of melted yellow cheese. On another part of the grill plate nearby, a pot filled with bubbling oil sits.", "visual_elements": [ { "content": "The camera focuses on a large grill plate.", "type": "camera", "weight": 3 }, { "content": "There is a hamburger patty on the grill plate.", "type": "attribute", "weight": 3 }, { "content": "There is a slice of melted yellow cheese on the patty.", "type": "attribute", "weight": 3 }, { "content": "On another part of the grill plate nearby, a pot filled with bubbling oil sits.", "type": "attribute", "weight": 3 } ] }, { "event": "Next, the camera switches to the male chef, who grabs two slices of bread from his right side, glances at the camera, and then begins cooking while looking down. He talks as he cooks. 
The camera then cuts back to the grill plate, where the chef places the two halves of the hamburger bun on the grill plate to toast.", "visual_elements": [ { "content": "The camera switches to the male chef.", "type": "camera", "weight": 3 }, { "content": "The male chef grabs two slices of bread from his right side.", "type": "action", "weight": 3 }, { "content": "The male chef glances at the camera, and then begins cooking while looking down.", "type": "action", "weight": 3 }, { "content": "The male chef talks as he cooks.", "type": "action", "weight": 2 }, { "content": "The camera then cuts back to the grill plate.", "type": "camera", "weight": 3 }, { "content": "The male chef places the two halves of the hamburger bun on the grill plate to toast.", "type": "action", "weight": 3 } ] }, { "event": "The scene flips. The chef uses a spatula to flip the bread on the left side of the iron plate and places it beside the grill plate. Finally, the male chef grabs the spatula, preparing to flip the other slice of bread.", "visual_elements": [ { "content": "The scene flips.", "type": "camera", "weight": 3 }, { "content": "The male chef uses a spatula to flip the bread on the left side of the iron plate.", "type": "action", "weight": 3 }, { "content": "The male chef places the bread beside the grill plate.", "type": "action", "weight": 3 }, { "content": "The male chef grabs the spatula, preparing to flip the other slice of bread.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 17 }, { "index": "TUNA_0549", "video_path": "YouCook2/103/sJLapskS7KU/split_0.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Cooking", "duration": 22.06, "resolution": { "width": 1280, "height": 720 }, "caption": "In the video, two people are making hamburger patties in the kitchen. The person on the left wears a gray long-sleeved shirt, a black hat, and a white apron. The person on the right wears a black T-shirt with the words \"SAVE FERRIS\" printed on it and also wears a black hat. They stand at the kitchen counter, which has two cutting boards. The left cutting board holds a block of white cheese and a pile of raw beef, while the right cutting board also has a pile of raw beef and a slice of yellow cheese.\nBoth of them are bowing their heads, talking while using their hands to knead the meat mixture. They occasionally exchange words, and the person on the left occasionally glances at the person on the right. Subsequently, the person on the right breaks off half of the meat mixture in his hand and places it on the meat mixture on the cutting board. The person on the left places their meat mixture on the cutting board.\nThe camera switches to a close-up shot, showing the person on the left continuously pressing down on the meat mixture with his hands.\nThen, the camera cuts back to a shot of both of them. The person on the right holds up the meat mixture in his right hand, while the person on the left leans forward to inspect the meat mixture being held up by the person on the right. Afterward, the person on the left continues pressing down on the meat mixture with his hands before flipping it over. The person on the right continues kneading the meat mixture with both hands and then smiles at the camera. Meanwhile, the person on the left stops pressing down on the meat mixture, raises both hands, and speaks to the person on the right.", "events": [ { "event": "In the video, two people are making hamburger patties in the kitchen. 
The person on the left wears a gray long-sleeved shirt, a black hat, and a white apron. The person on the right wears a black T-shirt with the words \"SAVE FERRIS\" printed on it and also wears a black hat. They stand at the kitchen counter, which has two cutting boards. The left cutting board holds a block of white cheese and a pile of raw beef, while the right cutting board also has a pile of raw beef and a slice of yellow cheese.", "visual_elements": [ { "content": "In the video, two people are making hamburger patties in the kitchen.", "type": "scene", "weight": 3 }, { "content": "The person on the left wears a gray long-sleeved shirt, a black hat, and a white apron.", "type": "attribute", "weight": 1 }, { "content": "The person on the right wears a black T-shirt with the words \"SAVE FERRIS\" printed on it and also wears a black hat.", "type": "attribute", "weight": 1 }, { "content": "The two people stand at the kitchen counter.", "type": "attribute", "weight": 3 }, { "content": "There are two cutting boards on the counter.", "type": "attribute", "weight": 1 }, { "content": "The left cutting board holds a block of white cheese and a pile of raw beef, while the right cutting board also has a pile of raw beef and a slice of yellow cheese.", "type": "attribute", "weight": 2 } ] }, { "event": "Both of them are bowing their heads, talking while using their hands to knead the meat mixture. They occasionally exchange words, and the person on the left occasionally glances at the person on the right. Subsequently, the person on the right breaks off half of the meat mixture in his hand and places it on the meat mixture on the cutting board. The person on the left places their meat mixture on the cutting board.", "visual_elements": [ { "content": "Both of them are bowing their heads, talking while using their hands to knead the meat mixture.", "type": "action", "weight": 3 }, { "content": "Both of them occasionally exchange words, and the person on the left occasionally glances at the person on the right.", "type": "action", "weight": 2 }, { "content": "The person on the right breaks off half of the meat mixture in his hand and places it on the meat mixture on the cutting board.", "type": "action", "weight": 3 }, { "content": "The person on the left places the meat mixture on the cutting board.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches to a close-up shot, showing the person on the left continuously pressing down on the meat mixture with his hands.", "visual_elements": [ { "content": "The camera switches to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "The person on the left continuously presses down on the meat mixture with his hands.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera cuts back to a shot of both of them. The person on the right holds up the meat mixture in his right hand, while the person on the left leans forward to inspect the meat mixture being held up by the person on the right. Afterward, the person on the left continues pressing down on the meat mixture with his hands before flipping it over. The person on the right continues kneading the meat mixture with both hands and then smiles at the camera. 
Meanwhile, the person on the left stops pressing down on the meat mixture, raises both hands, and speaks to the person on the right.", "visual_elements": [ { "content": "The camera cuts back to a shot of both of them.", "type": "camera", "weight": 3 }, { "content": "The person on the right holds up the meat mixture in his right hand.", "type": "action", "weight": 3 }, { "content": "The person on the left leans forward to inspect the meat mixture being held up by the person on the right.", "type": "action", "weight": 3 }, { "content": "The person on the left continues pressing down on the meat mixture.", "type": "action", "weight": 3 }, { "content": "The person on the left flips over the meat mixture.", "type": "action", "weight": 2 }, { "content": "The person on the right continues kneading the meat mixture with both hands.", "type": "action", "weight": 3 }, { "content": "The person on the right smiles at the camera.", "type": "action", "weight": 2 }, { "content": "The person on the left stops pressing down on the meat mixture.", "type": "action", "weight": 3 }, { "content": "The person on the left raises both hands and speaks to the person on the right.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0550", "video_path": "YouCook2/103/sJLapskS7KU/split_5.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 24.06, "resolution": { "width": 1280, "height": 720 }, "caption": "The scene shows a barbecue grill with six halved tomato slices placed on one side. Each tomato slice is sprinkled with spices.\nAt the beginning of the video, a man uses a spatula to place a raw hamburger patty on the grill. Next, he uses a clip with his left hand to transfer the patty from the spatula to the grill. Subsequently, he places a second hamburger patty on the grill.\nThe camera then turns right to focus on the two patties. As the camera moves to the left, the man places onions on the grill. He uses the spatula to spread out the onion rings on the grill. Afterward, he covers the grill with its lid.", "events": [ { "event": "The scene shows a barbecue grill with six halved tomato slices placed on one side. Each tomato slice is sprinkled with spices.", "visual_elements": [ { "content": "The scene shows a barbecue grill with six halved tomato slices.", "type": "scene", "weight": 3 }, { "content": "Slices of tomatoes are placed on one side of the grill, with spices sprinkled on top of each tomato.", "type": "attribute", "weight": 2 } ] }, { "event": "At the beginning of the video, a man uses a spatula to place a raw hamburger patty on the grill. Next, he uses a clip with his left hand to transfer the patty from the spatula to the grill. Subsequently, he places a second hamburger patty on the grill.", "visual_elements": [ { "content": "The man uses a spatula to place a raw hamburger patty on the grill.", "type": "action", "weight": 3 }, { "content": "The man uses a clip with his left hand to transfer the patty from the spatula to the grill.", "type": "action", "weight": 3 }, { "content": "The man places a second hamburger patty on the grill.", "type": "action", "weight": 3 } ] }, { "event": "The camera then turns right to focus on the two patties. As the camera moves to the left, the man places onions on the grill. He uses the spatula to spread out the onion rings on the grill. 
Afterward, he covers the grill with its lid.", "visual_elements": [ { "content": "The camera turns right to focus on the two patties.", "type": "camera", "weight": 3 }, { "content": "The camera moves to the left", "type": "camera", "weight": 3 }, { "content": "The man places onions on the grill.", "type": "action", "weight": 3 }, { "content": "The man uses the spatula to spread out the onion rings on the grill.", "type": "action", "weight": 3 }, { "content": "The man covers the grill with its lid.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 10 }, { "index": "TUNA_0551", "video_path": "YouCook2/103/seLgqS2A_y0/split_0.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 12.05, "resolution": { "width": 1280, "height": 720 }, "caption": "A chef is sharpening a knife. The chef wears a black chef's coat with red trim. Several pieces of raw meat are placed on a wooden cutting board in front of the chef. A blender, a roll of kitchen paper, and a pineapple can be seen on the counter in front of the chef. In the background, a kitchen environment is visible, featuring black curtains and some kitchen equipment.\nNext, the camera switches to a close-up shot of the chef's hands as he cuts a piece of raw beef on a wooden cutting board. The beef is a bright red color, indicating its freshness. The chef uses a sharp knife to slice the beef into small pieces, demonstrating skilled and rapid movements.\nThe camera continues to focus on the chef's hands as he further cuts the sliced beef into smaller pieces.\nSubsequently, the camera cuts to the chef stacking the sliced beef pieces together and then chopping them up. The camera moves down and forward.\nThen, the camera shows the chef stacking the chopped beef pieces together and shaping them into a round patty with his hands.", "events": [ { "event": "A chef is sharpening a knife. The chef wears a black chef's coat with red trim. Several pieces of raw meat are placed on a wooden cutting board in front of the chef. A blender, a roll of kitchen paper, and a pineapple can be seen on the counter in front of the chef. In the background, a kitchen environment is visible, featuring black curtains and some kitchen equipment.", "visual_elements": [ { "content": "A chef is sharpening a knife.", "type": "action", "weight": 3 }, { "content": "The chef wears a black chef's coat with red trim.", "type": "attribute", "weight": 2 }, { "content": "Several pieces of raw meat are placed on a wooden cutting board in front of the chef.", "type": "attribute", "weight": 2 }, { "content": "A blender, a roll of kitchen paper, and a pineapple can be seen on the counter in front of the chef.", "type": "scene", "weight": 2 }, { "content": "In the background, a kitchen environment is visible, featuring black curtains and some kitchen equipment.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera switches to a close-up shot of the chef's hands as he cuts a piece of raw beef on a wooden cutting board. The beef is a bright red color, indicating its freshness. 
The chef uses a sharp knife to slice the beef into small pieces, demonstrating skilled and rapid movements.", "visual_elements": [ { "content": "The camera switches to a close-up shot of the chef's hands.", "type": "camera", "weight": 3 }, { "content": "The chef cuts a piece of raw beef on a wooden cutting board.", "type": "action", "weight": 3 }, { "content": "The beef is a bright red color, indicating its freshness.", "type": "attribute", "weight": 1 }, { "content": "The chef uses a sharp knife to slice the beef into small pieces, demonstrating skilled and rapid movements.", "type": "action", "weight": 3 } ] }, { "event": "The camera continues to focus on the chef's hands as he further cuts the sliced beef into smaller pieces.", "visual_elements": [ { "content": "The camera continues to focus on the chef's hands", "type": "camera", "weight": 3 }, { "content": "The chef further cuts the sliced beef into smaller pieces.", "type": "action", "weight": 3 } ] }, { "event": "Subsequently, the camera cuts to the chef stacking the sliced beef pieces together and then chopping them up. The camera moves down and forward.", "visual_elements": [ { "content": "The camera cuts to the chef stacking the sliced beef pieces together.", "type": "camera", "weight": 3 }, { "content": "The chef piles up the sliced beef and then chops them up.", "type": "action", "weight": 3 }, { "content": "The camera moves down and forward.", "type": "camera", "weight": 3 } ] }, { "event": "Then, the camera shows the chef stacking the chopped beef pieces together and shaping them into a round patty with his hands.", "visual_elements": [ { "content": "The camera shows the chef stacking the chopped beef pieces together", "type": "camera", "weight": 3 }, { "content": "The chef shapes them into a round patty with his hands.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 16 }, { "index": "TUNA_0552", "video_path": "YouCook2/103/seLgqS2A_y0/split_7.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 5.0, "resolution": { "width": 1280, "height": 720 }, "caption": "The video starts with a shot of a male with long, grayish-white hair, wearing a black chef's coat. The background is a modern kitchen with clearly visible cabinets and an oven. Next, the camera cuts to a stove with two cast-iron pots placed on two burners. The pot on the left contains three well-browned hamburger patties, while the pot on the right has some oil in it.\nA hand appears in the frame, holding four sliced hamburger buns and placing them one by one into the pan on the right, with the cut side down.\nNext, the camera cuts to a closer shot, showing the four hamburger buns in the pan on the right. The hand uses a spatula to gently flip one of the buns.", "events": [ { "event": "The video starts with a shot of a male with long, grayish-white hair, wearing a black chef's coat. The background is a modern kitchen with clearly visible cabinets and an oven. Next, the camera cuts to a stove with two cast-iron pots placed on two burners. 
The pot on the left contains three well-browned hamburger patties, while the pot on the right has some oil in it.", "visual_elements": [ { "content": "The video starts with a shot of a male.", "type": "camera", "weight": 3 }, { "content": "The man has long, grayish-white hair and wears a black chef's coat.", "type": "attribute", "weight": 1 }, { "content": "The background is a modern kitchen with clearly visible cabinets and an oven.", "type": "scene", "weight": 1 }, { "content": "The camera cuts to a stove.", "type": "camera", "weight": 3 }, { "content": "On the stove, two cast-iron pots are placed on two burners.", "type": "attribute", "weight": 2 }, { "content": "The pot on the left contains three well-browned hamburger patties, while the pot on the right has some oil in it.", "type": "attribute", "weight": 3 } ] }, { "event": "A hand appears in the frame, holding four sliced hamburger buns and placing them one by one into the pan on the right, with the cut side down.", "visual_elements": [ { "content": "A hand appears in the frame, holding sliced hamburger buns.", "type": "action", "weight": 3 }, { "content": "The hand places them one by one into the pan on the right, with the cut side down.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to a closer shot, showing the four hamburger buns in the pan on the right. The hand uses a spatula to gently flip one of the buns.", "visual_elements": [ { "content": "The camera cuts to a closer shot.", "type": "camera", "weight": 3 }, { "content": "The hand uses a spatula to gently flip one of the buns.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 10 }, { "index": "TUNA_0553", "video_path": "YouCook2/104/NujJqJUXSQY/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 14.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a woman standing at the kitchen counter. She wears a plaid shirt, paired with a black vest underneath, and a red apron over her lower body. A necklace hangs around her neck, and a red watch is on her wrist. In the background, a white refrigerator can be seen, with several colorful papers stuck on it. Next to the refrigerator is an open wooden shelf displaying various kitchen utensils and ingredients. The woman speaks while looking at the camera.\nNext, the camera cuts to a close-up shot of butter and eggs on the counter. A hand removes a glass measuring cup from the table and then picks up an egg.\nThe camera switches back to the woman, who picks up an egg from a plate on the counter and taps it against the measuring cup, cracking it open and letting the egg yolk and white flow into the cup. She repeats the same action with a second egg, cracking it into the measuring cup.\nThe camera cuts to a close-up shot of the egg mixture flowing into the measuring cup.\nThen, the camera returns to the woman, who continues to crack eggs with practiced movements. She speaks while cracking the eggs.", "events": [ { "event": "At the beginning of the video, the camera focuses on a woman standing at the kitchen counter. She wears a plaid shirt, paired with a black vest underneath, and a red apron over her lower body. A necklace hangs around her neck, and a red watch is on her wrist. In the background, a white refrigerator can be seen, with several colorful papers stuck on it. 
Next to the refrigerator is an open wooden shelf displaying various kitchen utensils and ingredients. The woman speaks while looking at the camera.", "visual_elements": [ { "content": "The camera focuses on a woman.", "type": "camera", "weight": 3 }, { "content": "The woman stands at the kitchen counter.", "type": "attribute", "weight": 3 }, { "content": "The woman wears a plaid shirt, paired with a black vest underneath, and a red apron over her lower body.", "type": "attribute", "weight": 1 }, { "content": "In the background, a white refrigerator can be seen, with several colorful papers stuck on it.", "type": "scene", "weight": 1 }, { "content": "Next to the refrigerator is an open wooden shelf displaying various kitchen utensils and ingredients.", "type": "scene", "weight": 1 }, { "content": "The woman speaks while looking at the camera.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to a close-up shot of butter and eggs on the counter. A hand removes a glass measuring cup from the table and then picks up an egg.", "visual_elements": [ { "content": "The camera cuts to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "The frame shows the butter and eggs on the counter.", "type": "scene", "weight": 2 }, { "content": "A hand removes a glass measuring cup from the table.", "type": "action", "weight": 3 }, { "content": "A hand picks up an egg.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches back to the woman, who picks up an egg from a plate on the counter and taps it against the measuring cup, cracking it open and letting the egg yolk and white flow into the cup. She repeats the same action with a second egg, cracking it into the measuring cup.", "visual_elements": [ { "content": "The camera switches back to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman picks up an egg from a plate on the counter", "type": "action", "weight": 3 }, { "content": "The woman taps the egg against the measuring cup", "type": "action", "weight": 3 }, { "content": "The woman cracks an egg into the measuring cup.", "type": "action", "weight": 3 }, { "content": "The woman repeats the same action with a second egg, cracking it into the measuring cup.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up shot of the egg mixture flowing into the measuring cup.", "visual_elements": [ { "content": "The camera cuts to a close-up shot of the egg mixture flowing into the measuring cup.", "type": "camera", "weight": 3 } ] }, { "event": "Then, the camera returns to the woman, who continues to crack eggs with practiced movements. She speaks while cracking the eggs.", "visual_elements": [ { "content": "The camera returns to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman continues to crack eggs with practiced movements.", "type": "action", "weight": 3 }, { "content": "The woman speaks while cracking the eggs.", "type": "action", "weight": 3 } ] } ], "n_events": 5, "n_elements": 19 }, { "index": "TUNA_0554", "video_path": "YouCook2/104/NujJqJUXSQY/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 10.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video starts with a shot of a woman standing at the kitchen counter. She wears a gray and white plaid shirt with a black top underneath and a red apron tied around her waist. A necklace hangs around her neck, and a red watch is on her wrist. 
In the background, a white refrigerator can be seen, with several colorful papers stuck on it, and next to it is a black metal shelf displaying various kitchen utensils and ingredients.\nIn the video, the woman holds a blue-green ceramic jug. She tilts the jug, pouring the liquid into a transparent measuring cup that already contains egg mixture.\nThe camera cuts to a close-up shot of the liquid pouring out of the jug and flowing into the measuring cup.\nAfter pouring the liquid, the camera switches back to the woman. She puts down the jug and speaks while looking at the camera and gestures with both hands on either side.", "events": [ { "event": "The video starts with a shot of a woman standing at the kitchen counter. She wears a gray and white plaid shirt with a black top underneath and a red apron tied around her waist. A necklace hangs around her neck, and a red watch is on her wrist. In the background, a white refrigerator can be seen, with several colorful papers stuck on it, and next to it is a black metal shelf displaying various kitchen utensils and ingredients.", "visual_elements": [ { "content": "The camera focuses on a woman.", "type": "camera", "weight": 3 }, { "content": "The woman stands at the kitchen counter.", "type": "attribute", "weight": 3 }, { "content": "The woman wears a gray and white plaid shirt with a black top underneath and a red apron tied around her waist.", "type": "attribute", "weight": 1 }, { "content": "In the background, a white refrigerator can be seen, with several colorful papers stuck on it.", "type": "scene", "weight": 2 }, { "content": "To the left of the refrigerator is a black metal shelf on which are placed various kitchen utensils and ingredients.", "type": "scene", "weight": 2 } ] }, { "event": "In the video, the woman holds a blue-green ceramic jug. She tilts the jug, pouring the liquid into a transparent measuring cup that already contains egg mixture.", "visual_elements": [ { "content": "The woman holds a blue-green ceramic jug.", "type": "attribute", "weight": 3 }, { "content": "The woman tilts the jug, pouring the liquid into a transparent measuring cup that already contains egg mixture.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up shot of the liquid pouring out of the jug and flowing into the measuring cup.", "visual_elements": [ { "content": "The camera cuts to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "The liquid pours out of the jug and flows into the measuring cup.", "type": "scene", "weight": 3 } ] }, { "event": "After pouring the liquid, the camera switches back to the woman. She puts down the jug and speaks while looking at the camera and gestures with both hands on either side.", "visual_elements": [ { "content": "The camera switches back to the woman.", "type": "camera", "weight": 3 }, { "content": "The woman puts down the jug.", "type": "action", "weight": 3 }, { "content": "The woman speaks while looking at the camera.", "type": "action", "weight": 3 }, { "content": "The woman gestures with both hands on either side.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 13 }, { "index": "TUNA_0555", "video_path": "YouCook2/104/olrxEUXmlVA/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 21.02, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a black non-stick pan with a small amount of oil inside. 
Then, a hand holds a white plate filled with meat pieces marinated in red sauce. Another hand holds a metal spoon and scoops the meat pieces from the plate into the pan.\nSubsequently, the camera cuts to a shot of a hand holding a wooden spatula, which begins to gently stir the meat pieces in the pan. The camera switches angles multiple times, showing the stirring process from different perspectives. The wooden spatula continuously stirs the meat pieces to ensure they are evenly heated.", "events": [ { "event": "At the beginning of the video, the camera focuses on a black non-stick pan with a small amount of oil inside. Then, a hand holds a white plate filled with meat pieces marinated in red sauce. Another hand holds a metal spoon and scoops the meat pieces from the plate into the pan.", "visual_elements": [ { "content": "The camera focuses on a black non-stick pan.", "type": "camera", "weight": 3 }, { "content": "There is a small amount of oil in the pan.", "type": "attribute", "weight": 3 }, { "content": "A hand holds a white plate.", "type": "attribute", "weight": 2 }, { "content": "The plate is filled with meat pieces marinated in red sauce.", "type": "attribute", "weight": 2 }, { "content": "Another hand holds a metal spoon.", "type": "attribute", "weight": 3 }, { "content": "This hand scoops the meat pieces from the plate into the pan.", "type": "action", "weight": 3 } ] }, { "event": "Subsequently, the camera cuts to a shot of a hand holding a wooden spatula, which begins to gently stir the meat pieces in the pan. The camera switches angles multiple times, showing the stirring process from different perspectives. The wooden spatula continuously stirs the meat pieces to ensure they are evenly heated.", "visual_elements": [ { "content": "The camera cuts to a shot of a hand holding a wooden spatula.", "type": "camera", "weight": 3 }, { "content": "The spatula begins to gently stir the meat pieces in the pan.", "type": "action", "weight": 3 }, { "content": "The camera switches angles multiple times, showing the stirring process from different perspectives.", "type": "camera", "weight": 3 }, { "content": "The wooden spatula continuously stirs the meat pieces to ensure they are evenly heated.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0556", "video_path": "YouCook2/104/s9r-CxnCXkg/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 27.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "The camera focuses on a stove with a pot on it. A hand reaches forward to grab some seasoning and puts it into a white bowl held by the other hand. Subsequently, the scene cuts to a man. The camera moves up, revealing the man holding a bowl of egg mixture in his left hand while his right hand is raised high, sprinkling seasoning into the bowl.\nNext, the camera cuts to the pot as the egg mixture is poured into it. The scene shifts back to the man, who now uses his right hand to grab a wooden spatula from a utensil container. He lifts the spatula and speaks to the camera. The camera shifts to a close-up of the man's hands. His left hand holds the pot handle, while his right hand stirs the mixture in the small pot with the spatula. Then, he picks up the small pot. The camera zooms in, showing the spatula stirring the mixture inside as the egg mixture gradually cooks.\nThe camera shifts back to the man. 
He releases the spatula and pot handle, raises both hands, moves to the right, and speaks to the camera. Subsequently, the camera shifts back to the spatula as the man continues to stir the mixture in the pot. The camera cuts back to the spatula, and the man continues stirring in the pot with the wooden spatula. The camera shifts focus once again to inside the pot, revealing that the liquid is gradually transforming into a thick, orange paste. The man continues stirring to ensure the liquid is evenly heated. A close-up shot shows the man's hands and the liquid in the pot.", "events": [ { "event": "The camera focuses on a stove with a pot on it. A hand reaches forward to grab some seasoning and puts it into a white bowl held by the other hand. Subsequently, the scene cuts to a man. The camera moves up, revealing the man holding a bowl of egg mixture in his left hand while his right hand is raised high, sprinkling seasoning into the bowl.", "visual_elements": [ { "content": "The camera focuses on a stove.", "type": "camera", "weight": 3 }, { "content": "There is a pot on the stove.", "type": "attribute", "weight": 2 }, { "content": "A hand reaches forward to grab some seasoning and puts it into a white bowl held by the other hand.", "type": "action", "weight": 2 }, { "content": "The scene cuts to a man.", "type": "camera", "weight": 3 }, { "content": "The camera moves up.", "type": "camera", "weight": 3 }, { "content": "The man holds a bowl of egg mixture in his left hand while his right hand is raised high, sprinkling seasoning into the bowl.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera cuts to the pot as the egg mixture is poured into it. The scene shifts back to the man, who now uses his right hand to grab a wooden spatula from a utensil container. He lifts the spatula and speaks to the camera. The camera shifts to a close-up of the man's hands. His left hand holds the pot handle, while his right hand stirs the mixture in the small pot with the spatula. Then, he picks up the small pot. The camera zooms in, showing the spatula stirring the mixture inside as the egg mixture gradually cooks.", "visual_elements": [ { "content": "The camera cuts to the pot.", "type": "camera", "weight": 3 }, { "content": "The egg mixture is poured into the pot.", "type": "scene", "weight": 2 }, { "content": "The scene shifts back to the man.", "type": "camera", "weight": 3 }, { "content": "The man uses his right hand to grab a wooden spatula from a utensil container.", "type": "action", "weight": 3 }, { "content": "The man lifts the spatula and speaks to the camera.", "type": "action", "weight": 3 }, { "content": "The camera shifts to a close-up of the man's hands.", "type": "camera", "weight": 3 }, { "content": "The man's left hand holds the pot handle, while his right hand stirs the mixture in the pot with the spatula.", "type": "action", "weight": 3 }, { "content": "The man picks up the small pot.", "type": "action", "weight": 2 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The spatula stirs the mixture inside as the egg mixture gradually cooks.", "type": "scene", "weight": 3 } ] }, { "event": "The camera shifts back to the man. He releases the spatula and pot handle, raises both hands, moves to the right, and speaks to the camera. Subsequently, the camera shifts back to the spatula as the man continues to stir the mixture in the pot. 
The camera cuts back to the spatula, and the man continues stirring in the pot with the wooden spatula. The camera shifts focus once again to inside the pot, revealing that the liquid is gradually transforming into a thick, orange paste. The man continues stirring to ensure the liquid is evenly heated. A close-up shot shows the man's hands and the liquid in the pot.", "visual_elements": [ { "content": "The camera shifts back to the man.", "type": "camera", "weight": 3 }, { "content": "The man releases the spatula and pot handle, raises both hands, moves to the right, and speaks to the camera.", "type": "action", "weight": 3 }, { "content": "The camera shifts back to the pot.", "type": "camera", "weight": 3 }, { "content": "The man continues stirring in the pot with the wooden spatula.", "type": "action", "weight": 3 }, { "content": "The camera shifts focus once again to inside the pot.", "type": "camera", "weight": 3 }, { "content": "The liquid is gradually transforming into a thick, orange paste.", "type": "scene", "weight": 3 }, { "content": "A close-up shot shows the man's hand and the liquid in the pot.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0557", "video_path": "YouCook2/105/UtVDwLyQz08/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 14.0, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a brown marble countertop with two transparent glass bowls placed on it. One bowl contains several raw chicken thighs, while the other is empty. A hand, adorned with a gold ring, appears in the frame holding a gray measuring cup filled with a white mixture. The hand pours the mixture from the measuring cup into the empty glass bowl on the left.\nThe scene shifts. This time, the hand holds a measuring cup filled with liquid and pours the white liquid into the left glass bowl.\nThe scene changes again. The hand now holds a metal whisk, using it to stir the mixture in the bowl. The liquid in the bowl becomes more uniform, with bubbles appearing on the surface. In the bottom right corner of the screen, the text \"Youtube.com/CheezsKitchen\" and \"Facebook.com/CheezsMakeup\" are consistently displayed, indicating the video's source.", "events": [ { "event": "The video begins with the camera focusing on a brown marble countertop with two transparent glass bowls placed on it. One bowl contains several raw chicken thighs, while the other is empty. A hand, adorned with a gold ring, appears in the frame holding a gray measuring cup filled with a white mixture. 
The hand pours the mixture from the measuring cup into the empty glass bowl on the left.", "visual_elements": [ { "content": "The camera focuses on a brown marble countertop.", "type": "camera", "weight": 3 }, { "content": "There are two transparent glass bowls placed on the countertop.", "type": "attribute", "weight": 2 }, { "content": "One bowl contains several raw chicken thighs, while the other is empty.", "type": "attribute", "weight": 2 }, { "content": "A hand appears in the frame.", "type": "attribute", "weight": 2 }, { "content": "There is a gold ring on the hand.", "type": "attribute", "weight": 1 }, { "content": "A hand holds a gray measuring cup.", "type": "attribute", "weight": 3 }, { "content": "The measuring cup is filled with a white mixture.", "type": "attribute", "weight": 3 }, { "content": "The hand pours the mixture from the measuring cup into the empty glass bowl on the left.", "type": "action", "weight": 3 } ] }, { "event": "The scene shifts. This time, the hand holds a measuring cup filled with liquid and pours the white liquid into the left glass bowl.", "visual_elements": [ { "content": "The scene shifts.", "type": "camera", "weight": 3 }, { "content": "This time, the hand holds a measuring cup filled with liquid.", "type": "attribute", "weight": 3 }, { "content": "The hand pours the white liquid into the left glass bowl.", "type": "action", "weight": 3 } ] }, { "event": "The scene changes again. The hand now holds a metal whisk, using it to stir the mixture in the bowl. The liquid in the bowl becomes more uniform, with bubbles appearing on the surface. In the bottom right corner of the screen, the text \"Youtube.com/CheezsKitchen\" and \"Facebook.com/CheezsMakeup\" are consistently displayed, indicating the video's source.", "visual_elements": [ { "content": "The scene shifts.", "type": "camera", "weight": 3 }, { "content": "The hand holds a metal whisk.", "type": "attribute", "weight": 3 }, { "content": "The whisk stirs the mixture in the bowl.", "type": "action", "weight": 3 }, { "content": "The liquid in the bowl becomes more uniform, with bubbles appearing on the surface.", "type": "attribute", "weight": 3 }, { "content": "In the bottom right corner of the screen, the text \"Youtube.com/CheezsKitchen\" and \"Facebook.com/CheezsMakeup\" are consistently displayed, indicating the video's source.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0558", "video_path": "YouCook2/105/ixaY3TcKFFA/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 28.03, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video starts with the camera focusing on a kitchen scene. In the kitchen, a woman dressed in a black sleeveless top and gray apron stands at the kitchen counter. Two bowls are placed on the counter, one containing flour and the other containing marinated chicken pieces. In the background, kitchen cabinets, an oven, a rice cooker, and other kitchen appliances are visible.\nThe woman uses her right hand to take a few chicken pieces from the bowl and places them into the bowl with flour, rolling the chicken in the flour to coat it evenly. While preparing the ingredients, she speaks.\nThe camera cuts to a close-up shot. Then, she places the flour-coated chicken pieces on a tray lined with tin foil next to her. She repeats this action, coating each piece of chicken with flour and placing it on the tray. 
The number of chicken pieces on the tray gradually increases, arranged neatly.\nAt the end of the video, the camera shifts to the tray, showing all the flour-coated chicken pieces neatly arranged on it.", "events": [ { "event": "The video starts with the camera focusing on a kitchen scene. In the kitchen, a woman dressed in a black sleeveless top and gray apron stands at the kitchen counter. Two bowls are placed on the counter, one containing flour and the other containing marinated chicken pieces. In the background, kitchen cabinets, an oven, a rice cooker, and other kitchen appliances are visible.", "visual_elements": [ { "content": "The camera focuses on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "There is a woman in the kitchen.", "type": "attribute", "weight": 3 }, { "content": "A woman is dressed in a black sleeveless top and gray apron.", "type": "attribute", "weight": 1 }, { "content": "The woman stands at the kitchen counter.", "type": "attribute", "weight": 3 }, { "content": "Two bowls are placed on the counter, one containing flour and the other containing marinated chicken pieces.", "type": "attribute", "weight": 2 }, { "content": "In the background, kitchen cabinets, an oven, a rice cooker, and other kitchen appliances are visible.", "type": "scene", "weight": 1 } ] }, { "event": "The woman uses her right hand to take a few chicken pieces from the bowl and places them into the bowl with flour, rolling the chicken in the flour to coat it evenly. While preparing the ingredients, she speaks.", "visual_elements": [ { "content": "The woman uses her right hand to take a few chicken pieces from the bowl.", "type": "action", "weight": 3 }, { "content": "The woman places them into the bowl with flour.", "type": "action", "weight": 3 }, { "content": "The woman rolls the chicken in the flour to coat it evenly.", "type": "action", "weight": 3 }, { "content": "The woman speaks while preparing the ingredients.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a close-up shot. Then, she places the flour-coated chicken pieces on a tray lined with tin foil next to her. She repeats this action, coating each piece of chicken with flour and placing it on the tray. 
The number of chicken pieces on the tray gradually increases, arranged neatly.", "visual_elements": [ { "content": "The camera cuts to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "The woman places the chicken pieces coated with flour on a tray lined with tin foil beside her.", "type": "action", "weight": 3 }, { "content": "The woman repeats this action, coating each piece of chicken with flour and placing it on the tray.", "type": "action", "weight": 3 }, { "content": "The number of chicken pieces on the tray gradually increases, arranged neatly.", "type": "attribute", "weight": 2 } ] }, { "event": "At the end of the video, the camera shifts to the tray, showing all the flour-coated chicken pieces neatly arranged on it.", "visual_elements": [ { "content": "The camera shifts to the tray.", "type": "camera", "weight": 3 }, { "content": "All the flour-coated chicken pieces are neatly arranged on the tray.", "type": "attribute", "weight": 3 } ] } ], "n_events": 4, "n_elements": 16 }, { "index": "TUNA_0559", "video_path": "YouCook2/105/m6Fuxu5LjYg/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 21.05, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a transparent glass bowl containing liquid and some ingredients. A hand holding an egg whisk with a black handle stirs the liquid inside, which appears milky white, and bubbles can be seen forming on the surface during the stirring process. Next, the camera cuts to a hand placing several raw pieces of meat into the liquid. The meat pieces are submerged in the milky white liquid.\nThen, the camera shifts to a wooden cutting board, showing a piece of raw chicken. One hand holds the chicken while the other holds a knife, slicing the chicken in half. The cut can be seen revealing the meat and bone. Both hands pick up the two halves of the chicken and turn them left and right to show the camera.\nSubsequently, the camera returns to the glass bowl, where fingers gently stir the liquid. The fingers press down on a chicken piece, fully immersing it in the liquid. Afterward, the text prompt “Make Sure It's Submerged” appears on the screen.", "events": [ { "event": "At the beginning of the video, the camera focuses on a transparent glass bowl containing liquid and some ingredients. A hand holding an egg whisk with a black handle stirs the liquid inside, which appears milky white, and bubbles can be seen forming on the surface during the stirring process. Next, the camera cuts to a hand placing several raw pieces of meat into the liquid. 
The meat pieces are submerged in the milky white liquid.", "visual_elements": [ { "content": "The camera focuses on a transparent glass bowl.", "type": "camera", "weight": 3 }, { "content": "The bowl contains liquid and some ingredients.", "type": "attribute", "weight": 2 }, { "content": "A hand holding an egg whisk with a black handle stirs the liquid in the bowl.", "type": "action", "weight": 3 }, { "content": "The liquid appears milky white, and bubbles can be seen forming on the surface during the stirring process.", "type": "attribute", "weight": 2 }, { "content": "The camera cuts to a hand placing several raw pieces of meat into the liquid.", "type": "camera", "weight": 3 }, { "content": "The meat pieces are submerged in the milky white liquid.", "type": "attribute", "weight": 1 } ] }, { "event": "Then, the camera shifts to a wooden cutting board, showing a piece of raw chicken. One hand holds the chicken while the other holds a knife, slicing the chicken in half. The cut can be seen revealing the meat and bone. Both hands pick up the two halves of the chicken and turn them left and right to show the camera.", "visual_elements": [ { "content": "The camera shifts to a wooden cutting board.", "type": "camera", "weight": 3 }, { "content": "The frame shows a piece of raw chicken.", "type": "scene", "weight": 2 }, { "content": "One hand holds the chicken", "type": "action", "weight": 3 }, { "content": "The other hand holds a knife, slicing the chicken in half.", "type": "action", "weight": 3 }, { "content": "The cut can be seen revealing the meat and bone.", "type": "attribute", "weight": 1 }, { "content": "Both hands pick up the two halves of the chicken and turn them left and right to show the camera.", "type": "action", "weight": 3 } ] }, { "event": "Subsequently, the camera returns to the glass bowl, where fingers gently stir the liquid. The fingers press down on a chicken piece, fully immersing it in the liquid. Afterward, the text prompt “Make Sure It's Submerged” appears on the screen.", "visual_elements": [ { "content": "The camera returns to the glass bowl.", "type": "camera", "weight": 3 }, { "content": "The fingers gently stir the liquid", "type": "action", "weight": 3 }, { "content": "The fingers press down on a chicken piece, fully immersing it in the liquid.", "type": "action", "weight": 3 }, { "content": "The text prompt “Make Sure It's Submerged” appears on the screen.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0560", "video_path": "YouCook2/106/6Mi3xrBF1sY/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 7.0, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a stove, featuring two pots. One pot contains a white liquid, while the other is empty. 
A hand appears on the right side of the screen, holding a transparent measuring cup filled with orange-yellow shredded cheese.", "visual_elements": [ { "content": "The camera focuses on a stove.", "type": "camera", "weight": 3 }, { "content": "There are two pots on the stove.", "type": "scene", "weight": 2 }, { "content": "The pot close to the camera contains a white liquid, while the other is empty.", "type": "attribute", "weight": 2 }, { "content": "A hand appears on the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The hand holds a transparent measuring cup.", "type": "action", "weight": 3 }, { "content": "The transparent measuring cup is filled with orange-yellow shredded cheese.", "type": "attribute", "weight": 3 } ] }, { "event": "The hand moves the measuring cup towards the pot with the white liquid and gradually tilts it to pour the cheese into the pot. After pouring half of the cheese from the measuring cup, the hand tilts the cup upwards, stopping the flow of shredded cheese. In the bottom right corner of the video, white text appears reading \"Désirée/Dezi1016.\"", "visual_elements": [ { "content": "The hand moves the measuring cup towards the pot with the white liquid.", "type": "action", "weight": 3 }, { "content": "The measuring cup is gradually tilted to pour the cheese into the pot.", "type": "action", "weight": 3 }, { "content": "After pouring half of the cheese from the measuring cup, the hand tilts the cup upwards, stopping the flow of shredded cheese.", "type": "action", "weight": 2 }, { "content": "In the bottom right corner of the video, white text appears reading \"Désirée/Dezi1016.\"", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0561", "video_path": "YouCook2/106/QUV_oA9DrGw/split_4.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 22.02, "resolution": { "width": 640, "height": 480 }, "caption": "At the beginning of the video, the camera focuses on a black pot containing white liquid. There are some bubbles on the surface of the liquid. The pot is placed on a gas stove, with the background showing a kitchen countertop and some kitchen utensils.\nNext, the camera switches to a closer view, showing the liquid gently boiling in the pot. Then, a black spoon appears in the frame, stirring the liquid in the pot. A hand holds a white plate with yellow square-shaped objects. The hand tilts the plate, and the black spoon scoops some of the yellow squares off. The spoon continues stirring in the pot. The spoon exits the frame from the left side.", "events": [ { "event": "At the beginning of the video, the camera focuses on a black pot containing white liquid. There are some bubbles on the surface of the liquid. The pot is placed on a gas stove, with the background showing a kitchen countertop and some kitchen utensils.", "visual_elements": [ { "content": "The camera focuses on a black pot.", "type": "camera", "weight": 3 }, { "content": "The pot contains white liquid.", "type": "attribute", "weight": 2 }, { "content": "There are some bubbles on the surface of the liquid.", "type": "attribute", "weight": 1 }, { "content": "The pot is placed on a gas stove, with the background showing a kitchen countertop and some kitchen utensils.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera switches to a closer view, showing the liquid gently boiling in the pot. 
Then, a black spoon appears in the frame, stirring the liquid in the pot. A hand holds a white plate with yellow square-shaped objects. The hand tilts the plate, and the black spoon scoops some of the yellow squares off. The spoon continues stirring in the pot. The spoon exits the frame from the left side.", "visual_elements": [ { "content": "The camera switches to a closer angle.", "type": "camera", "weight": 3 }, { "content": "It shows the liquid gently boiling in the pot.", "type": "scene", "weight": 1 }, { "content": "A black spoon appears in the frame.", "type": "scene", "weight": 3 }, { "content": "The spoon stirs the liquid in the pot.", "type": "action", "weight": 3 }, { "content": "A hand holds a white plate with yellow cube-shaped pieces.", "type": "action", "weight": 3 }, { "content": "The hand tilts the plate.", "type": "action", "weight": 3 }, { "content": "The black spoon scoops the yellow cubes into the pot.", "type": "action", "weight": 3 }, { "content": "The spoon continues stirring in the pot.", "type": "action", "weight": 3 }, { "content": "The spoon exits the frame from the left side.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0562", "video_path": "YouCook2/106/W882NYaaiEs/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 15.02, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a kitchen stove, where there's a black pot containing some liquid. A man stands by the stove, stirring the pot with a wooden spoon. The man is wearing dark clothing, and in his right hand, he holds a transparent measuring cup with red markings, filled with white liquid. The man continuously stirs the thick liquid in the pot while slowly pouring the white liquid from the measuring cup into it.\nThe scene changes to show another pot on the left side of the stove, where some minced meat is being fried. On the right side, the black pot contains white liquid, with the wooden spoon gently stirring inside.", "events": [ { "event": "The video begins with the camera focusing on a kitchen stove, where there's a black pot containing some liquid. A man stands by the stove, stirring the pot with a wooden spoon. The man is wearing dark clothing, and in his right hand, he holds a transparent measuring cup with red markings, filled with white liquid. 
The man continuously stirs the thick liquid in the pot while slowly pouring the white liquid from the measuring cup into it.", "visual_elements": [ { "content": "The camera focuses on a kitchen stove.", "type": "camera", "weight": 3 }, { "content": "There's a black pot on the stove.", "type": "scene", "weight": 2 }, { "content": "The pot contains yellow, thick liquid.", "type": "attribute", "weight": 3 }, { "content": "A man stands by the stove, stirring the pot with a wooden spoon.", "type": "action", "weight": 3 }, { "content": "The man is wearing dark clothing.", "type": "attribute", "weight": 1 }, { "content": "The man holds a transparent measuring cup in his right hand.", "type": "attribute", "weight": 3 }, { "content": "The measuring cup has red markings and is filled with white liquid.", "type": "attribute", "weight": 2 }, { "content": "The man continuously stirs the thick liquid in the pot while slowly pouring the white liquid from the measuring cup into it.", "type": "action", "weight": 3 } ] }, { "event": "The scene changes to show another pot on the left side of the stove, where some minced meat is being fried. On the right side, the black pot contains white liquid, with the wooden spoon gently stirring inside.", "visual_elements": [ { "content": "The scene changes.", "type": "camera", "weight": 3 }, { "content": "There's another pot on the left side of the stove.", "type": "scene", "weight": 1 }, { "content": "Some minced meat is being fried in the pot.", "type": "attribute", "weight": 1 }, { "content": "The black pot on the right contains white liquid.", "type": "attribute", "weight": 1 }, { "content": "The wooden spoon gently stirs in the pot.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0563", "video_path": "YouCook2/106/XsALTvYUTI8/split_0.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 19.05, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on an empty pot with black interior and red exterior.\n\nA glass container filled with clear liquid enters the frame from the upper left corner. The container pours the clear liquid into the pot, then exits the frame from the left side.\nA transparent measuring cup containing white liquid enters the frame from the left. The measuring cup tilts, and the white liquid pours into the pot, blending with the clear liquid inside. Finally, the measuring cup shakes slightly above the pot.\nThen, a blue bowl appears in the frame, filled with yellow macaroni. The bowl tilts, pouring the macaroni into the pot, where it spreads out in the white liquid.\nAfter all the macaroni is poured into the pot, a black stirring spoon stirs in the pot, evenly distributing the macaroni in the liquid.", "events": [ { "event": "The camera focuses on an empty pot with black interior and red exterior.\n\nA glass container filled with clear liquid enters the frame from the upper left corner. 
The container pours the clear liquid into the pot, then exits the frame from the left side.", "visual_elements": [ { "content": "The camera focuses on an empty pot.", "type": "camera", "weight": 3 }, { "content": "The inside of the pot is black, and the outside is red.", "type": "attribute", "weight": 1 }, { "content": "A glass container filled with transparent liquid enters the frame from the top left.", "type": "scene", "weight": 3 }, { "content": "The glass container pours the transparent liquid into the pot.", "type": "action", "weight": 3 }, { "content": "The glass container exits the frame from the left side.", "type": "action", "weight": 1 } ] }, { "event": "A transparent measuring cup containing white liquid enters the frame from the left. The measuring cup tilts, and the white liquid pours into the pot, blending with the clear liquid inside. Finally, the measuring cup shakes slightly above the pot.", "visual_elements": [ { "content": "A transparent measuring cup with white liquid enters from the left side of the frame.", "type": "scene", "weight": 3 }, { "content": "The measuring cup tilts, and the white liquid begins to pour into the pot, blending with the transparent liquid.", "type": "action", "weight": 3 }, { "content": "The measuring cup shakes slightly above the pot.", "type": "action", "weight": 2 } ] }, { "event": "Then, a blue bowl appears in the frame, filled with yellow macaroni. The bowl tilts, pouring the macaroni into the pot, where it spreads out in the white liquid.", "visual_elements": [ { "content": "A blue bowl appears in the frame.", "type": "scene", "weight": 3 }, { "content": "The bowl is filled with yellow macaroni.", "type": "attribute", "weight": 3 }, { "content": "The bowl tilts, and the macaroni pours into the pot.", "type": "action", "weight": 3 } ] }, { "event": "After all the macaroni is poured into the pot, a black stirring spoon stirs in the pot, evenly distributing the macaroni in the liquid.", "visual_elements": [ { "content": "A black stirring spoon stirs in the pot, evenly distributing the macaroni in the liquid.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 12 }, { "index": "TUNA_0564", "video_path": "YouCook2/106/coYqrXsDPdU/split_4.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 15.05, "resolution": { "width": 640, "height": 480 }, "caption": "The video shows a cooking process. In the frame, there's a silver pot containing white liquid with some black particles in it. The liquid keeps swirling in the pot, and two drops of black liquid fall into it.\nThen a metal whisk appears on the screen, stirring the liquid rapidly. Next, a spoon filled with white particles appears on the screen. The spoon pours the white particles into the pot. The whisk continues stirring until the particles and liquid are fully mixed.\nFinally, the text prompt “reduce heat to med-low” appears on the screen.", "events": [ { "event": "The video shows a cooking process. In the frame, there's a silver pot containing white liquid with some black particles in it. 
The liquid keeps swirling in the pot, and two drops of black liquid fall into it.", "visual_elements": [ { "content": "The main focus of the frame is a silver pot containing white liquid, with some small black specks in it.", "type": "attribute", "weight": 3 }, { "content": "The liquid in the pot continuously swirls, and two drops of black liquid fall into the pot.", "type": "action", "weight": 3 } ] }, { "event": "Then a metal whisk appears on the screen, stirring the liquid rapidly. Then a spoon filled with white particles appears on the screen. Spoon the white particles into the pot. Continue stirring with the egg beater until the particles and liquid are fully mixed.", "visual_elements": [ { "content": "A metal whisk appears in the frame.", "type": "attribute", "weight": 3 }, { "content": "The whisk stirs the liquid rapidly.", "type": "action", "weight": 3 }, { "content": "A spoon filled with white granules appears in the frame.", "type": "attribute", "weight": 3 }, { "content": "The spoon pours the white granules into the pot.", "type": "action", "weight": 3 }, { "content": "The whisk continues stirring until the particles and liquid are fully mixed.", "type": "action", "weight": 3 } ] }, { "event": "Finally, text prompts appear on the screen “reduce heat to med-low”.", "visual_elements": [ { "content": "Text prompts appear on the screen: “reduce heat to med-low.”", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 8 }, { "index": "TUNA_0565", "video_path": "YouCook2/106/xXSBn23NBJU/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 17.02, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a kitchen stovetop with a stainless steel pan with a lid and an aluminum baking sheet. On the stovetop near the camera, part of a pan containing white liquid is in the frame, and a transparent measuring cup containing white liquid appears in the frame. The measuring cup is tilted and the liquid slowly pours into the pan.\nNext, the camera moves up and down slightly, and more white liquid is poured into the pan. The liquid in the pan gradually increases, and some small bubbles begin to appear on the surface.\nSubsequently, a hand holds a rectangular block of butter and puts it into the liquid in the pan. Then, a hand holds a black spoon, stirring the liquid and butter in the pan.", "events": [ { "event": "The video begins with the camera focusing on a kitchen stovetop with a stainless steel pan with a lid and an aluminum baking sheet. On the stovetop near the camera, part of a pan containing white liquid is in the frame, and a transparent measuring cup containing white liquid appears in the frame. 
The measuring cup is tilted and the liquid slowly pours into the pan.", "visual_elements": [ { "content": "The camera focuses on a kitchen stovetop.", "type": "camera", "weight": 3 }, { "content": "There is a stainless steel pan with a lid and an aluminum baking sheet on the stovetop.", "type": "scene", "weight": 2 }, { "content": "On the stovetop near the camera, part of a pan containing white liquid is in the frame.", "type": "attribute", "weight": 3 }, { "content": "There is a transparent measuring cup in the picture.", "type": "attribute", "weight": 3 }, { "content": "The measuring cup contains white liquid.", "type": "attribute", "weight": 2 }, { "content": "The measuring cup is tilted and the liquid is slowly poured into the pan.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera moves up and down slightly, and more white liquid is poured into the pan. The liquid in the pan gradually increases, and some small bubbles begin to appear on the surface.", "visual_elements": [ { "content": "The camera moves up and down slightly.", "type": "camera", "weight": 3 }, { "content": "More white liquid is poured into the pan.", "type": "action", "weight": 3 }, { "content": "The liquid in the pan gradually increases, and some small bubbles begin to appear on the surface.", "type": "attribute", "weight": 1 } ] }, { "event": "Subsequently, a hand holds a rectangular block of butter and puts it into the liquid in the pan. Then, a hand holds a black spoon, stirring the liquid and butter in the pan.", "visual_elements": [ { "content": "A hand holds a rectangular block of butter and puts it into the liquid in the pan.", "type": "action", "weight": 3 }, { "content": "A hand holds a black spoon, stirring the liquid and butter in the pan.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0566", "video_path": "YouCook2/107/I9nR5rDP7nQ/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 28.0, "resolution": { "width": 640, "height": 360 }, "caption": "The video begins with the camera focusing on a man, wearing a white shirt and black pants, standing at a kitchen counter. In the background is a modern kitchen with a few pieces of kitchenware on the wall and a stainless steel oven and microwave next to it. The counter is lined with ingredients, including onions and mushrooms.\nThe camera zooms in to show the man's hand handling a white ingredient. He cuts the food carefully and skillfully with a knife. The camera then zooms in further to show him squeezing one end of the white ingredient between his fingers and gently lifting it. He puts his hand into the white ingredient and pulls out several transparent objects. The camera cuts to the man's face, and the man looks down and speaks. The camera then follows the man's movements and the man shows the object in his hand to the camera. The man then throws the transparent object with his right hand to his right.\nThe camera returns to his hand as he continues to cut the white piece of food with the knife. The camera then cuts to a wider view, showing him standing in front of the counter cutting the ingredients. Next, the camera cuts to a close-up again, showing him placing the white ingredients on the counter and then continuing to cut the white ingredients. 
Finally, the camera cuts to a wide angle, then zooms in to focus on the man slicing the ingredients.", "events": [ { "event": "The video begins with the camera focusing on a man, wearing a white shirt and black pants, standing at a kitchen counter. In the background is a modern kitchen with a few pieces of kitchenware on the wall and a stainless steel oven and microwave next to it. The counter is lined with ingredients, including onions and mushrooms.", "visual_elements": [ { "content": "The camera focuses on a man.", "type": "camera", "weight": 3 }, { "content": "A man wearing a white shirt and black pants stands at a kitchen counter.", "type": "attribute", "weight": 2 }, { "content": "In the background is a modern kitchen with a few pieces of kitchenware on the wall and a stainless steel oven and microwave next to it.", "type": "scene", "weight": 1 }, { "content": "The counter is lined with ingredients, including onions and mushrooms.", "type": "scene", "weight": 1 } ] }, { "event": "The camera zooms in to show the man's hand handling a white ingredient. He cuts the food carefully and skillfully with a knife. The camera then zooms in further to show him squeezing one end of the white ingredient between his fingers and gently lifting it. He puts his hand into the white ingredient and pulls out several transparent objects. The camera cuts to the man's face, and the man looks down and speaks. The camera then follows the man's movements and the man shows the object in his hand to the camera. The man then throws the transparent object with his right hand to his right.", "visual_elements": [ { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The man's hand handles a white ingredient.", "type": "action", "weight": 3 }, { "content": "The man cuts the food carefully and skillfully with a knife.", "type": "action", "weight": 3 }, { "content": "The camera then zooms in further.", "type": "camera", "weight": 3 }, { "content": "The man squeezes one end of the white ingredient between his fingers.", "type": "action", "weight": 3 }, { "content": "The man puts his hand into the white ingredient and pulls out several transparent objects.", "type": "action", "weight": 3 }, { "content": "The camera cuts to the man's face.", "type": "camera", "weight": 3 }, { "content": "The man looks down and speaks.", "type": "action", "weight": 2 }, { "content": "The camera follows the man's movements.", "type": "camera", "weight": 3 }, { "content": "The man shows the object in his hand to the camera.", "type": "action", "weight": 3 }, { "content": "The man throws the transparent object with his right hand to his right.", "type": "action", "weight": 3 } ] }, { "event": "The camera returns to his hand as he continues to cut the white piece of food with the knife. The camera then cuts to a wider view, showing him standing in front of the counter cutting the ingredients. Next, the camera cuts to a close-up again, showing him placing the white ingredients on the counter and then continuing to cut the white ingredients. 
Finally, the camera cuts to a wide angle, then zooms in to focus on the man slicing the ingredients.", "visual_elements": [ { "content": "The camera returns to his hand.", "type": "camera", "weight": 3 }, { "content": "The man continues to cut the white piece of food with the knife.", "type": "action", "weight": 3 }, { "content": "The camera then cuts to a wider view, showing him standing in front of the counter cutting the ingredients.", "type": "camera", "weight": 3 }, { "content": "The camera cuts to a close-up again.", "type": "camera", "weight": 3 }, { "content": "The man places the white ingredients on the counter and then continues to cut the white ingredients.", "type": "action", "weight": 3 }, { "content": "The camera cuts to a wide angle, then zooms in.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0567", "video_path": "YouCook2/107/IW096cyCuaY/split_0.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 24.06, "resolution": { "width": 640, "height": 480 }, "caption": "The video begins with the camera focusing on a clear glass bowl that already contains some white flour in it. Then, a hand appears on the left side of the frame and picks up a small glass bowl to the left of the large glass bowl. The small glass bowl contains some brown powder. The hand pours the brown powder from the small bowl into the large glass bowl. The powder falls on top of the white powder. After pouring, the hand holds the small bowl and leaves from the left side of the frame.\nThen, the hand again picks up another small glass bowl next to the large bowl. The second small glass bowl contains some dark powder. The hand pours the dark powder into the large glass bowl. The hand holds the second small bowl and leaves from the left side of the frame.\nNext, the hand picks up the third small glass bowl to the left of the large glass bowl, and the hand tries to pour the powder from the small bowl into the large glass bowl, but not much powder comes out. With a metal spoon in hand, the powder is scooped out from the small bowl and poured into the large glass bowl. The left hand holds the glass bowl and the right hand holds the spoon and stirs in the glass bowl.", "events": [ { "event": "The video begins with the camera focusing on a clear glass bowl that already contains some white flour in it. Then, a hand appears on the left side of the frame and picks up a small glass bowl to the left of the large glass bowl. The small glass bowl contains some brown powder. The hand pours the brown powder from the small bowl into the large glass bowl. The powder falls on top of the white powder. After pouring, the hand holds the small bowl and leaves from the left side of the frame.", "visual_elements": [ { "content": "The camera focuses on a clear glass bowl.", "type": "camera", "weight": 3 }, { "content": "The bowl already contains some white flour in it.", "type": "attribute", "weight": 2 }, { "content": "A hand appears on the left side of the frame and picks up a small glass bowl to the left of the large glass bowl.", "type": "action", "weight": 3 }, { "content": "The small glass bowl contains some brown powder.", "type": "attribute", "weight": 1 }, { "content": "The hand pours the brown powder from the small bowl into the large glass bowl. 
The powder falls on top of the white powder.", "type": "action", "weight": 3 }, { "content": "After pouring, the hand holds the small bowl and leaves from the left side of the frame.", "type": "action", "weight": 2 } ] }, { "event": "Then, the hand again picks up another small glass bowl next to the large bowl. The second small glass bowl contains some dark powder. The hand pours the dark powder into the large glass bowl. The hand holds the second small bowl and leaves from the left side of the frame.", "visual_elements": [ { "content": "The hand again picks up another small glass bowl next to the large bowl.", "type": "action", "weight": 3 }, { "content": "The second small glass bowl contains some dark powder.", "type": "attribute", "weight": 1 }, { "content": "The hand pours the dark powder into the large glass bowl.", "type": "action", "weight": 3 }, { "content": "The hand holds the second small bowl and leaves from the left side of the frame.", "type": "action", "weight": 2 } ] }, { "event": "Next, the hand picks up the third small glass bowl to the left of the large glass bowl, and the hand tries to pour the powder from the small bowl into the large glass bowl, but not much powder comes out. With a metal spoon in hand, the powder is scooped out from the small bowl and poured into the large glass bowl. The left hand holds the glass bowl and the right hand holds the spoon and stirs in the glass bowl.", "visual_elements": [ { "content": "The hand picks up the third small glass bowl to the left of the large glass bowl.", "type": "action", "weight": 3 }, { "content": "The hand tries to pour the powder from the small bowl into the large glass bowl, but not much powder comes out.", "type": "action", "weight": 3 }, { "content": "With a metal spoon in hand, the powder is scooped out from the small bowl and poured into the large glass bowl.", "type": "action", "weight": 3 }, { "content": "The left hand holds the glass bowl.", "type": "action", "weight": 3 }, { "content": "The right hand holds the spoon and stirs in the glass bowl.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0568", "video_path": "YouCook2/107/Nbl97Bp3Kzw/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 10.01, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a stainless steel basin containing sliced squid and flour. The squid appears pale pink and white, while the flour is white. The background features a wooden workbench.\nNext, the camera cuts to a pair of hands that mix the squid and flour. The hands wear rings and the fingers are covered with flour. Both hands toss the flour-coated squid back and forth.\nThe camera zooms out a little to see the entire stainless steel basin and part of the workbench. The caption \"Gianni.tv to follow Gianni online\" appears on the screen. On the countertop are a large kitchen knife and a cut lemon. The camera moves to the right, showing the person leaving the workbench while holding the floured squid slices in the right hand and the metal bowl in the left hand.", "events": [ { "event": "The video begins with the camera focusing on a stainless steel basin containing sliced squid and flour. The squid appears pale pink and white, while the flour is white. 
The background features a wooden workbench.", "visual_elements": [ { "content": "The camera focuses on a stainless steel basin.", "type": "camera", "weight": 3 }, { "content": "The basin contains sliced squid and flour.", "type": "attribute", "weight": 3 }, { "content": "The squid appears pale pink and white, while the flour is white.", "type": "attribute", "weight": 2 }, { "content": "The background features a wooden workbench.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera cuts to a pair of hands that mix the squid and flour. The hands wear rings and the fingers are covered with flour. Both hands toss the flour-coated squid back and forth.", "visual_elements": [ { "content": "The camera cuts to a pair of hands.", "type": "camera", "weight": 3 }, { "content": "The pair of hands mix the squid and flour.", "type": "action", "weight": 2 }, { "content": "The hands wear rings and the fingers are covered with flour.", "type": "attribute", "weight": 2 }, { "content": "Both hands toss the flour-coated squid back and forth.", "type": "action", "weight": 3 } ] }, { "event": "The camera zooms out a little to see the entire stainless steel basin and part of the workbench. The caption \"Gianni.tv to follow Gianni online\" appears on the screen. On the countertop are a large kitchen knife and a cut lemon. The camera moves to the right, showing the person leaving the workbench while holding the floured squid slices in the right hand and the metal bowl in the left hand.", "visual_elements": [ { "content": "The camera zooms out a little to see the entire stainless steel basin and part of the workbench.", "type": "camera", "weight": 3 }, { "content": "The caption \"Gianni.tv to follow Gianni online\" appears on the screen.", "type": "scene", "weight": 1 }, { "content": "On the countertop are a large kitchen knife and a cut lemon.", "type": "scene", "weight": 1 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The person leaves the workbench while holding the floured squid slices in the right hand and the metal bowl in the left hand.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0569", "video_path": "YouCook2/108/r4VSQuNE6D4/split_4.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 7.01, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is pointed at a pan with an undercooked pancake in the shape of Santa Claus' face. An adult male's hand appears on the left side of the screen. The pan handle is on the left side of the screen. The man's left hand gently holds the handle and his right hand holds a spatula. The background is a white kitchen countertop.\nThe man scoops the pancake from the bottom of the pan with a spatula and turns it over. With his left hand, he rotates the pan to the right, with the pan handle facing the lower left. The man turns the pancake over, with the pan handle facing the upper left, and the other side of the pancake is golden yellow.", "events": [ { "event": "At the beginning of the video, the camera is pointed at a pan with an undercooked pancake in the shape of Santa Claus' face. An adult male's hand appears on the left side of the screen. The pan handle is on the left side of the screen. The man's left hand gently holds the handle and his right hand holds a spatula. 
The background is a white kitchen countertop.", "visual_elements": [ { "content": "The camera is focusing on a frying pan.", "type": "camera", "weight": 3 }, { "content": "There is a half-cooked pancake in the pan.", "type": "scene", "weight": 3 }, { "content": "The pancake is in the shape of Santa Claus's face.", "type": "attribute", "weight": 3 }, { "content": "An adult male's hand appears on the left side of the frame.", "type": "scene", "weight": 2 }, { "content": "The pan's handle is on the left side of the frame.", "type": "scene", "weight": 3 }, { "content": "The man's left hand gently holds the handle.", "type": "action", "weight": 2 }, { "content": "The right hand holds a spatula.", "type": "action", "weight": 2 }, { "content": "The background is a white kitchen countertop.", "type": "scene", "weight": 2 } ] }, { "event": "The man scoops the pancake from the bottom of the pan with a spatula and turns it over. With his left hand, he rotates the pan to the right, with the pan handle facing the lower left. The man turns the pancake over, with the pan handle facing the upper left, and the other side of the pancake is golden yellow.", "visual_elements": [ { "content": "The man uses the spatula to lift the pancake from the bottom of the pan and flip it over.", "type": "action", "weight": 3 }, { "content": "The pan's handle is facing the lower left.", "type": "attribute", "weight": 2 }, { "content": "The pan's handle is facing the upper left.", "type": "attribute", "weight": 2 }, { "content": "The other side of the pancake is golden yellow.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0570", "video_path": "YouCook2/109/DKk5IVcIWW8/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 11.0, "resolution": { "width": 3840, "height": 2160 }, "caption": "The video begins with the camera focusing on a white bowl with a few chicken wings in it. The background is a blue countertop and stainless steel edges. A hand enters the frame from the right and places more chicken wings into the bowl.\nThe frame switches to a new scene, with the hand sprinkling salt into the bowl. The hand enters again from the right and sprinkles some pepper on the chicken wings.", "events": [ { "event": "The video begins with the camera focusing on a white bowl with a few chicken wings in it. The background is a blue countertop and stainless steel edges. A hand enters the frame from the right and places more chicken wings into the bowl.", "visual_elements": [ { "content": "The camera focuses on a white bowl.", "type": "camera", "weight": 3 }, { "content": "There are some chicken wings in the bowl.", "type": "scene", "weight": 3 }, { "content": "At the bottom of the screen is a white caption that reads, “When they are cut, put the bowls in a wing. Add a pinch of salt freshly ground white or black pepper.”", "type": "scene", "weight": 1 }, { "content": "In the background is a blue countertop with a stainless steel rim.", "type": "scene", "weight": 3 }, { "content": "The hands enter the scene from the right to place more wings in a bowl.", "type": "action", "weight": 2 } ] }, { "event": "The frame switches to a new scene, with the hand sprinkling salt into the bowl. 
The hand enters again from the right and sprinkles some pepper on the chicken wings.", "visual_elements": [ { "content": "The scene changes to a new one.", "type": "camera", "weight": 3 }, { "content": "The hand sprinkles salt into the bowl.", "type": "action", "weight": 2 }, { "content": "The hand sprinkles some pepper on the chicken wings.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0571", "video_path": "YouCook2/109/N6J2ZQYic_g/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 23.06, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a pair of hands unrolling a sheet of aluminum foil, which is laid flat on a baking sheet, and fingers gently press the edges of the foil to keep it close to the surface of the baking sheet.\nThe camera then switches to a plate of raw chicken wings next to the baking sheet. The hand holds a pair of tongs, picks up a chicken wing, and places it on the foil-lined baking sheet, where the wings are neatly arranged.\nThe camera switches again, and the baking sheet is almost full of chicken wings, and the hand continues to fill the baking sheet with chicken wings using the tongs.", "events": [ { "event": "The video begins with a pair of hands unrolling a sheet of aluminum foil, which is laid flat on a baking sheet, and fingers gently press the edges of the foil to keep it close to the surface of the baking sheet.", "visual_elements": [ { "content": "Fingers gently press the edges of the aluminum foil so that it is laid flat against the surface of the baking sheet.", "type": "action", "weight": 3 } ] }, { "event": "The camera then switches to a plate of raw chicken wings next to the baking sheet. The hand holds a pair of tongs, picks up a chicken wing, and places it on the foil-lined baking sheet, where the wings are neatly arranged.", "visual_elements": [ { "content": "The camera then switches.", "type": "camera", "weight": 3 }, { "content": "Next to the baking sheet is a plate with raw chicken wings.", "type": "scene", "weight": 2 }, { "content": "The hand holds a pair of tongs.", "type": "action", "weight": 2 }, { "content": "The hand picks up a chicken wing with the tongs.", "type": "action", "weight": 2 }, { "content": "The hand picks up a chicken wing with the tongs and places it on the foil-lined baking sheet.", "type": "action", "weight": 2 }, { "content": "The wings are neatly arranged on the baking sheet.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches again, and the baking sheet is almost full of chicken wings, and the hand continues to fill the baking sheet with chicken wings using the tongs.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "The baking sheet is almost full of wings.", "type": "scene", "weight": 3 }, { "content": "The hand holding the tongs continues to arrange the wings on the baking sheet.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 10 }, { "index": "TUNA_0572", "video_path": "YouCook2/109/N6J2ZQYic_g/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 9.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on the inside of an oven, with a baking sheet full of raw chicken wings on the oven shelf. The wings are a fresh pink color. 
The camera pans slightly to the left, showing the full view of the baking tray, with the chicken wings neatly arranged on the baking tray.\nThe camera moves up to show a man holding the sides of a second baking sheet with both hands and slowly pushing it into the oven. The scene shows two baking trays filled with chicken wings on a rack in the center of the oven.", "events": [ { "event": "The video begins with the camera focusing on the inside of an oven, with a baking sheet full of raw chicken wings on the oven shelf. The wings are a fresh pink color. The camera pans slightly to the left, showing the full view of the baking tray, with the chicken wings neatly arranged on the baking tray.", "visual_elements": [ { "content": "The camera focuses on the inside of an oven.", "type": "camera", "weight": 3 }, { "content": "The wings are a fresh pink color.", "type": "attribute", "weight": 2 }, { "content": "The camera pans slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The camera pans slightly to the left to show a full view of the baking sheet.", "type": "scene", "weight": 2 }, { "content": "The wings are neatly arranged on the baking sheet.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves up to show a man holding the sides of a second baking sheet with both hands and slowly pushing it into the oven. The scene shows two baking trays filled with chicken wings on a rack in the center of the oven.", "visual_elements": [ { "content": "The camera moves upward.", "type": "camera", "weight": 3 }, { "content": "A man holds the sides of a second baking sheet with both hands.", "type": "action", "weight": 3 }, { "content": "The baking sheet is slowly pushed into the oven.", "type": "action", "weight": 3 }, { "content": "Two baking trays filled with chicken wings are shown on a rack in the center of the oven.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0573", "video_path": "YouCook2/110/1mB0G1AwUPg/split_10.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 23.06, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a kitchen scene with a row of bread and two chalkboards filled with writing in the background, and an English black watermark ''TayTV'' in the bottom right corner of the video. There are two people in the scene, a male and a female. The male is wearing a black short-sleeved shirt with tattoos on his arms, and the female is wearing a white sweater with long hair draped over her shoulders.\nThe scene switches to an overhead view showing the contents of two large bowls. The bowl on the left contains a yellow liquid and the bowl on the right contains green lettuce and some crumbled bread cubes. The woman uses a spoon to scoop up some of the yellow liquid and pour it over the lettuce.\nThe camera again switches back to a frontal view as the male steps aside to explain something with hand gestures and the female reaches into the bowl with both hands to stir the lettuce in the bowl. 
The camera again switches to an overhead view and zooms in to show a close-up of the female mixing the lettuce with her hands; the ring and watch on her hand can be seen in the frame. The camera then switches to the woman stirring while the man talks.", "events": [ { "event": "The video begins with the camera focusing on a kitchen scene with a row of bread and two chalkboards filled with writing in the background, and an English black watermark ''TayTV'' in the bottom right corner of the video. There are two people in the scene, a male and a female. The male is wearing a black short-sleeved shirt with tattoos on his arms, and the female is wearing a white sweater with long hair draped over her shoulders.", "visual_elements": [ { "content": "The camera focuses on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "In the background there is a row of bread and two blackboards with writing all over them.", "type": "scene", "weight": 2 }, { "content": "In the bottom right corner of the video there is a black watermark in English, ''TayTV''.", "type": "scene", "weight": 1 }, { "content": "There are two people in the scene, a man and a woman.", "type": "scene", "weight": 3 }, { "content": "The male is wearing a black short-sleeved shirt.", "type": "attribute", "weight": 2 }, { "content": "There are tattoos on his arms.", "type": "attribute", "weight": 1 }, { "content": "The female is wearing a white sweater with long hair over her shoulders.", "type": "attribute", "weight": 2 }, { "content": "The male is holding a large spoon and scooping up some liquid from a large bowl of yellow liquid.", "type": "action", "weight": 2 }, { "content": "The female also picks up a spoon and presents the liquid.", "type": "action", "weight": 2 } ] }, { "event": "The scene switches to an overhead view showing the contents of two large bowls. The bowl on the left contains a yellow liquid and the bowl on the right contains green lettuce and some crumbled bread cubes. The woman uses a spoon to scoop up some of the yellow liquid and pour it over the lettuce.", "visual_elements": [ { "content": "The frame switches to an overhead view.", "type": "camera", "weight": 3 }, { "content": "The frame shows the contents of two large bowls.", "type": "scene", "weight": 3 }, { "content": "The bowl on the left contains yellow liquid.", "type": "attribute", "weight": 2 }, { "content": "The bowl on the right contains green lettuce and some croutons.", "type": "attribute", "weight": 2 }, { "content": "The female picks up some of the yellow liquid with a spoon and pours it over the lettuce.", "type": "action", "weight": 2 } ] }, { "event": "The camera again switches back to a frontal view as the male steps aside to explain something with hand gestures and the female reaches into the bowl with both hands to stir the lettuce in the bowl. 
The camera again switches to an overhead view and zooms in to show a close-up of the female mixing the lettuce with her hands; the ring and watch on her hand can be seen in the frame. The camera then switches to the woman stirring while the man talks.", "visual_elements": [ { "content": "The camera switches back to the front view again.", "type": "camera", "weight": 3 }, { "content": "The male is explaining something with hand gestures.", "type": "action", "weight": 2 }, { "content": "The female reaches into the bowl with both hands and mixes the lettuce.", "type": "action", "weight": 2 }, { "content": "The camera again switches to an overhead view.", "type": "camera", "weight": 3 }, { "content": "The camera zooms in to show a close-up of the female mixing the lettuce with her hands.", "type": "scene", "weight": 2 }, { "content": "The ring and watch on her hand can be seen in the frame.", "type": "attribute", "weight": 1 }, { "content": "The camera switches to the woman stirring while the man talks.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0574", "video_path": "YouCook2/110/1mB0G1AwUPg/split_8.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 8.05, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a kitchen scene with a chalkboard menu and some neatly arranged bread in the background. A male and a female are standing at the kitchen counter. The male is wearing a black, short-sleeved shirt with a red design on it and tattoos on his arms. The female is wearing a white sweater.\nThe male is holding a large kitchen knife and is cutting cooked meat, while the female makes occasional gestures. The camera switches to a close-up shot focusing on the male's hand and the kitchen knife, showing details of him cutting the ingredients. The camera switches again to the female's hand as she too begins to cut the ingredients with the knife; the female is wearing a ring on her hand.\nAt the end of the video, the camera returns to the male's hand as he continues to cut the ingredients, switching again to the female cutting the meat.", "events": [ { "event": "The video begins with the camera focusing on a kitchen scene with a chalkboard menu and some neatly arranged bread in the background. A male and a female are standing at the kitchen counter. The male is wearing a black, short-sleeved shirt with a red design on it and tattoos on his arms. The female is wearing a white sweater.", "visual_elements": [ { "content": "The camera focuses on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "In the background there is a chalkboard menu and some neatly arranged loaves of bread.", "type": "scene", "weight": 2 }, { "content": "A male and a female are standing in front of a kitchen counter.", "type": "scene", "weight": 2 }, { "content": "The male is wearing a black short-sleeved shirt.", "type": "attribute", "weight": 1 }, { "content": "The shirt has a red design on it.", "type": "attribute", "weight": 1 }, { "content": "The male has a tattoo on his arm.", "type": "attribute", "weight": 1 }, { "content": "The female is wearing a white sweater.", "type": "attribute", "weight": 1 } ] }, { "event": "The male is holding a large kitchen knife and is cutting cooked meat, while the female makes occasional gestures. The camera switches to a close-up shot focusing on the male's hand and the kitchen knife, showing details of him cutting the ingredients. 
The camera switches again to the female's hand as she too begins to cut the ingredients with the knife; the female is wearing a ring on her hand.", "visual_elements": [ { "content": "The male is holding a large kitchen knife and is cutting cooked meat.", "type": "action", "weight": 3 }, { "content": "The female makes occasional gestures.", "type": "action", "weight": 2 }, { "content": "The camera switches to a close-up.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the male's hand and the kitchen knife.", "type": "camera", "weight": 3 }, { "content": "The shot shows details of the male cutting the ingredients.", "type": "scene", "weight": 2 }, { "content": "The camera switches again to the female's hand.", "type": "camera", "weight": 3 }, { "content": "The female also starts to cut the ingredients with the knife.", "type": "action", "weight": 2 }, { "content": "The female has a ring on her hand.", "type": "attribute", "weight": 1 } ] }, { "event": "At the end of the video, the camera returns to the male's hand as he continues to cut the ingredients, switching again to the female cutting the meat.", "visual_elements": [ { "content": "The camera returns to the male's hand.", "type": "camera", "weight": 3 }, { "content": "The male continues to cut the ingredients.", "type": "action", "weight": 2 }, { "content": "The camera switches again to the woman cutting meat.", "type": "camera", "weight": 2 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0575", "video_path": "YouCook2/110/3tGhLc48eX8/split_5.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 9.0, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a kitchen scene with modern kitchen equipment and cabinets in the background. A female wearing a gray shirt stands at a workbench in the center of the kitchen with a large wooden bowl and a number of ingredients in front of her, including lettuce, lemons, a jar of spices, and a yellow cup, and she holds a bottle of olive oil in her left hand.\nThe female begins to pour the olive oil into the bowl and the camera switches to a close-up showing the olive oil slowly flowing out of the top of the bottle. Subtitle reads “Add 1/4 cup of olive oil”. The camera switches to a close-up of the woman pouring the olive oil and stirring the mixture in the bowl with her right hand. The final shot is an overhead shot of the entire scene.", "events": [ { "event": "The video begins with the camera focusing on a kitchen scene with modern kitchen equipment and cabinets in the background. 
A female wearing a gray shirt stands at a workbench in the center of the kitchen with a large wooden bowl and a number of ingredients in front of her, including lettuce, lemons, a jar of spices, and a yellow cup, and she holds a bottle of olive oil in her left hand.", "visual_elements": [ { "content": "The camera focuses on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "Modern kitchen equipment and cabinets in the background.", "type": "scene", "weight": 2 }, { "content": "A woman in a gray shirt stands at a workbench in the center of the kitchen.", "type": "scene", "weight": 2 }, { "content": "In front of the woman is a large wooden bowl and some ingredients.", "type": "attribute", "weight": 2 }, { "content": "The ingredients include lettuce, lemons, a jar of spices and a yellow cup.", "type": "attribute", "weight": 2 }, { "content": "In the woman's left hand is a bottle of olive oil.", "type": "action", "weight": 2 } ] }, { "event": "The female begins to pour the olive oil into the bowl and the camera switches to a close-up showing the olive oil slowly flowing out of the top of the bottle. Subtitle reads “Add 1/4 cup of olive oil”. The camera switches to a close-up of the woman pouring the olive oil and stirring the mixture in the bowl with her right hand. The final shot is an overhead shot of the entire scene.", "visual_elements": [ { "content": "The Woman begins to pour olive oil into a wooden bowl", "type": "action", "weight": 3 }, { "content": "The camera switches to a close-up", "type": "camera", "weight": 3 }, { "content": "The camera shows the olive oil slowly flowing out of the mouth of the bottle. Subtitles cue “Add 1/4 cup of olive oil”", "type": "attribute", "weight": 2 }, { "content": "The camera switches to a close-up.", "type": "camera", "weight": 3 }, { "content": "The woman pours the olive oil and stirs the mixture in the bowl with her right hand.", "type": "action", "weight": 2 }, { "content": "The final camera shot is an overhead shot of the entire scene.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0576", "video_path": "YouCook2/110/FNUumn079DM/split_5.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 9.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a kitchen scene. There is a red refrigerator in the background with various kitchen items on shelves next to it. A male stands in front of the kitchen counter wearing a black t-shirt with a green and white monogram. The male is holding a red pan and green tongs, and is performing a cooking operation as he holds the ingredients in the pan with the tongs.\nThe camera switches to a close-up shot of the man pouring oil from the pan into a rectangular silver container with a lot of white meat in it. The man takes a green clip and adds minced garlic from the pan to the container.", "events": [ { "event": "The video begins with the camera focusing on a kitchen scene. There is a red refrigerator in the background with various kitchen items on shelves next to it. A male stands in front of the kitchen counter wearing a black t-shirt with a green and white monogram. 
The male is holding a red pan and green tongs, and is performing a cooking operation as he holds the ingredients in the pan with the tongs.", "visual_elements": [ { "content": "The camera focuses on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "There is a red refrigerator in the background.", "type": "scene", "weight": 1 }, { "content": "There are various kitchen items on the shelves next to it.", "type": "scene", "weight": 2 }, { "content": "A male is standing in front of the kitchen counter.", "type": "scene", "weight": 3 }, { "content": "The male is wearing a black t-shirt.", "type": "attribute", "weight": 2 }, { "content": "The T-shirt has a green and white monogram on it.", "type": "attribute", "weight": 1 }, { "content": "The male is holding a red pan and green tongs for cooking.", "type": "action", "weight": 2 }, { "content": "The male uses the tongs to hold the ingredients in the pan.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to a close-up shot of the man pouring oil from the pan into a rectangular silver container with a lot of white meat in it. The man takes a green clip and adds minced garlic from the pan to the container.", "visual_elements": [ { "content": "The camera switches to a close-up.", "type": "camera", "weight": 3 }, { "content": "The male pours oil from the pan into a rectangular silver container.", "type": "action", "weight": 3 }, { "content": "There is a lot of white meat in the container.", "type": "attribute", "weight": 2 }, { "content": "The male holds the green clamps.", "type": "action", "weight": 2 }, { "content": "Adds minced garlic from the pan to the container.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0577", "video_path": "YouCook2/110/M4cDslY_qCg/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 10.01, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins by focusing on a wooden cutting board where a man is cutting bread with a serrated knife and the bread is cut into individual bread cubes. The camera switches to an overhead angle to show the cut bread cubes being placed into a clear glass bowl. There is already some olive oil and seasoning in the bowl and the man uses both hands to place the bread cubes into the bowl.\nThe camera switches to show this person using his hands to mix the bread cubes with the seasoning, making sure that each piece of bread is evenly coated with the seasoning.", "events": [ { "event": "The video begins by focusing on a wooden cutting board where a man is cutting bread with a serrated knife and the bread is cut into individual bread cubes. The camera switches to an overhead angle to show the cut bread cubes being placed into a clear glass bowl. 
There is already some olive oil and seasoning in the bowl and the man uses both hands to place the bread cubes into the bowl.", "visual_elements": [ { "content": "The scene focuses on a wooden cutting board.", "type": "camera", "weight": 3 }, { "content": "A male is cutting bread with a serrated knife.", "type": "scene", "weight": 2 }, { "content": "The bread is cut into individual pieces.", "type": "scene", "weight": 2 }, { "content": "The camera switches to an overhead perspective.", "type": "camera", "weight": 3 }, { "content": "The frame shows the cut bread cubes being put into a clear glass bowl.", "type": "scene", "weight": 2 }, { "content": "There is already some olive oil and seasoning in the bowl.", "type": "attribute", "weight": 2 }, { "content": "The Male uses both hands to place the bread cubes into the bowl.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to show this person using his hands to mix the bread cubes with the seasoning, making sure that each piece of bread is evenly coated with the seasoning.", "visual_elements": [ { "content": "The scene switches to the person mixing the bread cubes and seasonings with his hands.", "type": "camera", "weight": 3 }, { "content": "Making sure that each piece of bread is evenly coated with the seasoning.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0578", "video_path": "YouCook2/110/k7EVFW1nCNI/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 11.01, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a clear glass bowl filled with fresh green lettuce leaves. A hand appears in the frame holding a small bowl with a white sauce and slowly pours the sauce into the glass bowl containing the lettuce. The sauce flows out of the small bowl and evenly covers the lettuce leaves. At this point, a wooden kitchen counter is in the background.\nThe scene then switches to a new scene with the bowl of lettuce oiled with white sauce and topped with a few bread cubes. Both hands are holding red tools to stir the lettuce and bread cubes to distribute the sauce more evenly over the lettuce.", "events": [ { "event": "The video begins with the camera focusing on a clear glass bowl filled with fresh green lettuce leaves. A hand appears in the frame holding a small bowl with a white sauce and slowly pours the sauce into the glass bowl containing the lettuce. The sauce flows out of the small bowl and evenly covers the lettuce leaves. 
At this point, a wooden kitchen counter is in the background.", "visual_elements": [ { "content": "The camera focuses on a clear glass bowl.", "type": "camera", "weight": 3 }, { "content": "The bowl is filled with fresh green lettuce leaves.", "type": "attribute", "weight": 2 }, { "content": "A hand appears in the frame.", "type": "scene", "weight": 2 }, { "content": "The hand is holding a small bowl with a white sauce.", "type": "action", "weight": 2 }, { "content": "The hand slowly pours the sauce into the glass bowl of lettuce.", "type": "action", "weight": 2 }, { "content": "The sauce flows out of the bowl.", "type": "scene", "weight": 1 }, { "content": "The sauce evenly covers the lettuce leaves.", "type": "attribute", "weight": 1 }, { "content": "A wooden kitchen table is in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The scene then switches to a new scene with the bowl of lettuce oiled with white sauce and topped with a few bread cubes. Both hands are holding red tools to stir the lettuce and bread cubes to distribute the sauce more evenly over the lettuce.", "visual_elements": [ { "content": "The scene changes to a new one.", "type": "scene", "weight": 2 }, { "content": "The bowl of lettuce is covered with a white sauce.", "type": "attribute", "weight": 2 }, { "content": "A few bread cubes are placed on top of the sauce.", "type": "attribute", "weight": 2 }, { "content": "Both hands are holding red tools to mix the lettuce and bread cubes.", "type": "action", "weight": 2 }, { "content": "to distribute the sauce more evenly over the lettuce.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0579", "video_path": "YouCook2/110/qpxPlQ0IfLk/split_5.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 9.01, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a kitchen scene with a gray brick wall in the background with some shelves on which various kitchen supplies and ingredients are arranged. In the center of the frame is a male wearing a colorful plaid shirt, standing in front of a cooking station with a jar of mayonnaise in his hand, scooping out some of the mayonnaise with a spoon.\nThe camera then switches to a close-up showing him putting the mayonnaise into a clear blender container. The camera switches back to a panoramic view showing him picking up a bottle of yellow relish, uncapping it, and preparing to add it to the blender. The camera switches back to close-up again to show him squeezing the dressing into the blender.\nThe camera switches back to a panoramic view showing him picking up a bottle of olive oil and opening the cap ready to pour it into the blender. The camera switches back to close-up again to show the olive oil being poured from the top of the bottle into the blender.", "events": [ { "event": "The video begins with the camera focusing on a kitchen scene with a gray brick wall in the background with some shelves on which various kitchen supplies and ingredients are arranged. 
In the center of the frame is a male wearing a colorful plaid shirt, standing in front of a cooking station with a jar of mayonnaise in his hand, scooping out some of the mayonnaise with a spoon.", "visual_elements": [ { "content": "The camera focuses on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "A gray brick wall is in the background.", "type": "scene", "weight": 2 }, { "content": "There are shelves on the wall.", "type": "scene", "weight": 1 }, { "content": "On the shelves are various kitchen utensils and ingredients.", "type": "attribute", "weight": 1 }, { "content": "In the center of the frame is a man wearing a colorful plaid shirt.", "type": "scene", "weight": 3 }, { "content": "The man is standing in front of a kitchen table.", "type": "attribute", "weight": 2 }, { "content": "The man is holding a jar of mayonnaise.", "type": "attribute", "weight": 2 }, { "content": "The man uses a spoon to scoop out some mayonnaise.", "type": "action", "weight": 2 } ] }, { "event": "The camera then switches to a close-up showing him putting the mayonnaise into a clear blender container. The camera switches back to a panoramic view showing him picking up a bottle of yellow relish, uncapping it, and preparing to add it to the blender. The camera switches back to close-up again to show him squeezing the dressing into the blender.", "visual_elements": [ { "content": "The camera switches to a close-up.", "type": "camera", "weight": 3 }, { "content": "The male is shown putting mayonnaise into a clear blender container.", "type": "action", "weight": 2 }, { "content": "The camera switches back to a panoramic view.", "type": "camera", "weight": 3 }, { "content": "The scene shows the male picking up a bottle of yellow relish.", "type": "action", "weight": 2 }, { "content": "The male opens the lid of the bottle.", "type": "action", "weight": 2 }, { "content": "The camera switches to a close-up again.", "type": "camera", "weight": 3 }, { "content": "The shot shows the male squeezing the dressing into the blender.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches back to a panoramic view showing him picking up a bottle of olive oil and opening the cap ready to pour it into the blender. The camera switches back to close-up again to show the olive oil being poured from the top of the bottle into the blender.", "visual_elements": [ { "content": "The camera switches back to panorama.", "type": "camera", "weight": 3 }, { "content": "The male is shown picking up a bottle of olive oil.", "type": "action", "weight": 2 }, { "content": "The male opens the cap and pours the olive oil into the blender.", "type": "action", "weight": 2 }, { "content": "The camera switches to close-up again.", "type": "camera", "weight": 3 }, { "content": "The shot shows olive oil being poured from the bottle into the blender.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0580", "video_path": "YouCook2/110/qpxPlQ0IfLk/split_8.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 9.05, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a young male wearing a multi-colored plaid shirt standing at a kitchen counter. In the background is a gray brick wall with a number of shelves on which are various kitchen supplies and ingredients.\nThe young male holds the lid of a blender up as he puts some ingredients into the blender. 
The camera switches to him putting the lid back on the blender as he presses down on the lid with his hand to make sure it is tightly closed. He then starts the blender, which begins to operate and the ingredients inside begin to be blended.", "visual_elements": [ { "content": "The camera focuses on a young male.", "type": "camera", "weight": 3 }, { "content": "The male is wearing a multi-colored plaid shirt.", "type": "attribute", "weight": 2 }, { "content": "The male is standing in front of a kitchen counter.", "type": "action", "weight": 2 }, { "content": "In the background is a gray brick wall.", "type": "scene", "weight": 2 }, { "content": "Above the wall are shelves.", "type": "attribute", "weight": 2 }, { "content": "On the shelves are various kitchen items and ingredients.", "type": "attribute", "weight": 1 } ] }, { "event": "The young male holds the lid of a blender up as he puts some ingredients into the blender. The camera switches to him putting the lid back on the blender as he presses down on the lid with his hand to make sure it is tightly closed. He then starts the blender, which begins to operate and the ingredients inside begin to be blended.", "visual_elements": [ { "content": "The young male lifts the lid of a blender.", "type": "action", "weight": 2 }, { "content": "The male puts some ingredients into the blender.", "type": "action", "weight": 2 }, { "content": "The camera switches to the male putting the lid back on the blender.", "type": "camera", "weight": 3 }, { "content": "The male presses down on the lid with his hand making sure the lid is tightly closed.", "type": "action", "weight": 2 }, { "content": "The male presses the button on the blender to make it work.", "type": "action", "weight": 2 }, { "content": "The camera switches to a closer view of the blender and shakes slightly.", "type": "scene", "weight": 3 }, { "content": "Ingredients in the blender are blended into a gray-green puree.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0581", "video_path": "YouCook2/111/QLaA91I4t3U/split_0.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 18.05, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a kitchen counter. A male wearing a black short-sleeved t-shirt and khaki pants stands in front of the counter. The male is wearing a silver watch on his hand, holding a knife with a red handle in his right hand and a green cabbage in his left.\nThe camera focuses on the male's hand as the male uses the knife to cut off the bottom and outer leaves of the cabbage. The camera zooms in slightly closer to show him cutting the cabbage in half and then quartering one of the halves. The camera switches to a closer angle showing the male placing the cut cabbage on a cutting board and continuing to cut it into thin strips. Once the cutting is complete the male places these thin strips of cabbage into a clear glass bowl.\nFinally, the camera focuses on the glass bowl filled with cabbage.", "events": [ { "event": "The video begins with the camera focusing on a kitchen counter. 
A male wearing a black short-sleeved t-shirt and khaki pants stands in front of the console. The male is wearing a silver watch on his hand, holding a knife with a red handle in his right hand and a green cabbage in his left.", "visual_elements": [ { "content": "The camera focuses on a kitchen counter.", "type": "camera", "weight": 3 }, { "content": "A man wearing a black short-sleeved T-shirt and khaki pants stands in front of the console.", "type": "scene", "weight": 3 }, { "content": "The male is wearing a silver-colored watch on his hand.", "type": "attribute", "weight": 2 }, { "content": "In the male's right hand is a knife with a red handle.", "type": "attribute", "weight": 2 }, { "content": "In the male's left hand, he is holding a green cabbage.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera focuses on the male's hand as the male uses the knife to cut off the bottom and outer leaves of the cabbage. The camera zooms in slightly closer to show him cutting the cabbage in half and then quartering one of the halves. The camera switches to a closer angle showing the male placing the cut cabbage on a cutting board and continuing to cut it into thin strips. Once the cutting is complete the male places these thin strips of cabbage into a clear glass bowl.", "visual_elements": [ { "content": "The camera zooms in slightly on the male's hand.", "type": "camera", "weight": 3 }, { "content": "The male uses the knife to cut off the bottom and outer leaves of the cabbage.", "type": "action", "weight": 2 }, { "content": "The camera shows the male cutting the cabbage in half.", "type": "action", "weight": 2 }, { "content": "The male then cuts one of the halves into quarters.", "type": "action", "weight": 1 }, { "content": "The camera switches to a closer angle.", "type": "camera", "weight": 3 }, { "content": "The male continues to cut the cabbage into thin strips.", "type": "action", "weight": 1 }, { "content": "The man puts the thin strips of cabbage into a clear glass bowl.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera focuses on the glass bowl filled with cabbage.", "visual_elements": [ { "content": "Finally, the camera focuses on the bowl full of cabbage.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0582", "video_path": "YouCook2/112/0VBNakxmATU/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 5.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera looking down to film a wooden counter with various ingredients and tools. In the center of the frame is a large white bowl with a yellow rim filled with Italian mostaccioli. To the right of the bowl is a large chef's knife, next to it are two small bowls containing salt and pepper, and further to the right is a whisk and a large blue bowl containing some sort of liquid. On the left side of the bowl is a piece of cut butter and a small clear bowl, and further to the left is a wooden spoon and a blue measuring cup. Next to the large yellow bowl is a white caption that reads “cooked mostaccioli”.\nThe camera switches to a close-up as a pair of hands wearing multiple bracelets appear in the frame, placing some small yellow and red tomatoes and cheese cubes into the bowl of pasta and repeating several times.", "events": [ { "event": "The video begins with the camera looking down to film a wooden counter with various ingredients and tools. 
In the center of the frame is a large white bowl with a yellow rim filled with Italian mostaccioli. To the right of the bowl is a large chef's knife, next to it are two small bowls containing salt and pepper, and further to the right is a whisk and a large blue bowl containing some sort of liquid. On the left side of the bowl is a piece of cut butter and a small clear bowl, and further to the left is a wooden spoon and a blue measuring cup. Next to the large yellow bowl is a white caption that reads “cooked mostaccioli”.", "visual_elements": [ { "content": "The camera looks down on a wooden counter.", "type": "camera", "weight": 3 }, { "content": "There are various ingredients and tools on it.", "type": "scene", "weight": 2 }, { "content": "There is a large white bowl with a yellow rim.", "type": "attribute", "weight": 2 }, { "content": "It is filled with Italian mostaccioli.", "type": "attribute", "weight": 2 }, { "content": "To the right of the bowl is a large kitchen knife.", "type": "attribute", "weight": 1 }, { "content": "Next to the knife are two small bowls.", "type": "attribute", "weight": 1 }, { "content": "The two small bowls are filled with salt and pepper.", "type": "attribute", "weight": 1 }, { "content": "To the right is a whisk and a large blue bowl.", "type": "attribute", "weight": 1 }, { "content": "The large blue bowl contains some kind of liquid.", "type": "attribute", "weight": 1 }, { "content": "On the counter, there are cubes of butter and small tomatoes.", "type": "attribute", "weight": 2 }, { "content": "Next to the large white bowl is a small clear bowl.", "type": "attribute", "weight": 2 }, { "content": "To the left is a wooden spoon and a blue measuring cup.", "type": "attribute", "weight": 1 }, { "content": "Next to the large yellow bowl is a white caption that reads “cooked mostaccioli”.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera switches to a close-up as a pair of hands wearing multiple bracelets appear in the frame, placing some small yellow and red tomatoes and cheese cubes into the bowl of pasta and repeating several times.", "visual_elements": [ { "content": "The camera switches to a close-up.", "type": "camera", "weight": 3 }, { "content": "A pair of hands wearing multiple bracelets appear in the frame.", "type": "attribute", "weight": 1 }, { "content": "Some small yellow and red tomatoes and cheese cubes are added to the bowl of pasta and the process is repeated several times.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0583", "video_path": "YouCook2/112/osov-0HY1Lw/split_0.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 8.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a kitchen sink in which a metal strainer has been placed. A male hand, wearing a silver watch, is shown holding a black pot filled with cooked macaroni, which is poured into the colander along with a stream of water.\nThe scene switches to a male wearing a blue shirt and black apron, standing next to the sink, holding the sides of the colander with both hands and gently shaking the colander to further strain the water out of the macaroni.", "events": [ { "event": "The video begins with the camera focusing on a kitchen sink in which a metal strainer has been placed. 
A male hand, wearing a silver watch, is shown holding a black pot filled with cooked macaroni, which is poured into the colander along with a stream of water.", "visual_elements": [ { "content": "The camera focuses on a kitchen sink.", "type": "camera", "weight": 3 }, { "content": "A metal strainer is placed inside the sink", "type": "scene", "weight": 3 }, { "content": "A male hand appears in the frame.", "type": "scene", "weight": 2 }, { "content": "The male is wearing a silver watch.", "type": "attribute", "weight": 2 }, { "content": "The male is holding a black pot filled with cooked macaroni.", "type": "action", "weight": 2 }, { "content": "The macaroni is poured into the colander along with the water.", "type": "attribute", "weight": 2 } ] }, { "event": "The scene switches to a male wearing a blue shirt and black apron, standing next to the sink, holding the sides of the colander with both hands and gently shaking the colander to further strain the water out of the macaroni.", "visual_elements": [ { "content": "The scene changes to a male wearing a blue shirt and black apron.", "type": "camera", "weight": 3 }, { "content": "The male is standing next to the sink.", "type": "scene", "weight": 2 }, { "content": "The male holds the sides of the colander with both hands and gently shakes the colander.", "type": "action", "weight": 2 }, { "content": "The male further filters out the water from the macaroni.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0584", "video_path": "YouCook2/112/osov-0HY1Lw/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 18.02, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a kitchen scene. A male stands at the island counter in the center of the kitchen, wearing a blue shirt and dark apron with the word “DAVID” printed there. There is a watch on his hand as he prepares ingredients.\nThe camera switches to a distant view, showing him continuing to grate carrots, the shreds of which are piled up there on a cutting board.", "events": [ { "event": "The video begins with the camera focusing on a kitchen scene. A male stands at the island counter in the center of the kitchen, wearing a blue shirt and dark apron with the word “DAVID” printed there. 
There is a watch on his hand as he prepares ingredients.", "visual_elements": [ { "content": "The camera focuses on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "The male stands in front of the island counter in the center of the kitchen.", "type": "scene", "weight": 2 }, { "content": "The male is wearing a blue shirt and a dark colored apron.", "type": "attribute", "weight": 2 }, { "content": "There is the word “DAVID” printed on the apron.", "type": "attribute", "weight": 2 }, { "content": "There is a watch on the male's hand.", "type": "attribute", "weight": 1 }, { "content": "The male is preparing ingredients.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to a distant view, showing him continuing to grate carrots, the shreds of which are piled up on a cutting board.", "visual_elements": [ { "content": "The camera switches to a distant view.", "type": "camera", "weight": 3 }, { "content": "The male continues to grate carrots.", "type": "action", "weight": 2 }, { "content": "Shredded carrots are piled up on a cutting board.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0585", "video_path": "YouCook2/112/pwfzJlVHLJM/split_0.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 7.04, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a kitchen counter on which a wooden cutting board is placed, and there are several small transparent bowls containing chopped carrots, chopped green onions, sliced ginger, flour, and other seasonings. A female wearing a light-colored, long-sleeved blouse stands in front of the counter, and the female wears a black watch on her wrist.\nThe female holds a clear bowl with cooked macaroni in her left hand and a clear spatula in her right hand. The female pours the macaroni into a larger clear glass bowl and the spatula is used to scrape the remaining macaroni from the bowl.\nThe camera switches to a wider view of the entire kitchen in the background. The female puts down the spatula and picks up the empty bowl with both hands, placing the empty bowl on the side of the cutting board, facing the camera as she speaks.", "events": [ { "event": "The video begins with the camera focusing on a kitchen counter on which a wooden cutting board is placed, and there are several small transparent bowls containing chopped carrots, chopped green onions, sliced ginger, flour, and other seasonings. A female wearing a light-colored, long-sleeved blouse stands in front of the counter, and the female wears a black watch on her wrist.", "visual_elements": [ { "content": "The camera focuses on a kitchen counter.", "type": "camera", "weight": 3 }, { "content": "There is a wooden cutting board on top of the counter.", "type": "scene", "weight": 2 }, { "content": "There are several small transparent bowls on the cutting board.", "type": "scene", "weight": 2 }, { "content": "Inside are chopped carrots, chopped green onions, ginger, flour and other seasonings.", "type": "attribute", "weight": 1 }, { "content": "A woman wearing a light-colored, long-sleeved blouse stands in front of the counter.", "type": "scene", "weight": 3 }, { "content": "There is a black watch on her wrist.", "type": "attribute", "weight": 2 } ] }, { "event": "The female holds a clear bowl with cooked macaroni in her left hand and a clear spatula in her right hand. 
The female pours the macaroni into a larger clear glass bowl and the spatula is used to scrape the remaining macaroni from the bowl.", "visual_elements": [ { "content": "With the female's left hand, the female picks up a transparent bowl containing cooked macaroni.", "type": "action", "weight": 2 }, { "content": "In the female's right hand, she holds a transparent spatula.", "type": "action", "weight": 2 }, { "content": "The female pours the macaroni into a larger clear glass bowl.", "type": "action", "weight": 2 }, { "content": "The spatula is used to scrape the remaining macaroni from the bowl.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to a wider view of the entire kitchen in the background. The female puts down the spatula and picks up the empty bowl with both hands, placing the empty bowl on the side of the cutting board, facing the camera as she speaks.", "visual_elements": [ { "content": "The camera switches to a wider view.", "type": "camera", "weight": 3 }, { "content": "The entire kitchen can be seen in the background", "type": "scene", "weight": 2 }, { "content": "The female places the spatula and the empty bowl on either side of the cutting board.", "type": "action", "weight": 2 }, { "content": "The female gestures with her hands in front of her chest as she speaks to the camera", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0586", "video_path": "YouCook2/113/DvAiRex4JlQ/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 14.01, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a kitchen scene. A female wearing a blue shirt stands in front of a console in the center of the kitchen. The female's hair is tied back in a ponytail and there are earrings in her ears. There is some cookware and ingredients on the console, including a loaf of bread, some butter, and a few slices of ham.\nThe scene switches to the female preparing the ingredients at the console as the female carefully lays the slices of ham on top of the cheese, one by one. The shot switches back to a panoramic view of the woman standing at the operating table, smiling and holding another piece of bread.", "events": [ { "event": "The video begins with the camera focusing on a kitchen scene. A female wearing a blue shirt stands in front of a console in the center of the kitchen. The female's hair is tied back in a ponytail and there are earrings in her ears. There is some cookware and ingredients on the console, including a loaf of bread, some butter, and a few slices of ham.", "visual_elements": [ { "content": "The camera focuses on a kitchen scene", "type": "camera", "weight": 3 }, { "content": "A female wearing a blue shirt stands in front of a counter in the center of the kitchen.", "type": "scene", "weight": 3 }, { "content": "The female's hair is tied back in a ponytail.", "type": "attribute", "weight": 2 }, { "content": "There are earrings in the female's ears.", "type": "attribute", "weight": 2 }, { "content": "There are a number of kitchen utensils and ingredients on the counter.", "type": "scene", "weight": 2 }, { "content": "The ingredients include a loaf of bread, some butter and a few slices of ham.", "type": "attribute", "weight": 1 } ] }, { "event": "The scene switches to the female preparing the ingredients at the console as the female carefully lays the slices of ham on top of the cheese, one by one. 
The shot switches back to a panoramic view of the woman standing at the operating table, smiling and holding another piece of bread.", "visual_elements": [ { "content": "The scene changes to the female preparing the ingredients at the counter.", "type": "camera", "weight": 3 }, { "content": "The female carefully lays the slices of ham on top of the cheese.", "type": "action", "weight": 2 }, { "content": "The camera switches back to panorama.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0587", "video_path": "YouCook2/113/GaxyzK2mHqw/split_4.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 11.01, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a stovetop knob and a close-up showing a hand turning the knob. The camera then switches to a pan, and a hand holds a slice of bread with cheese in it and places it in the pan. The hand picks up a lid from the right side of the frame and puts it over the pan.\nThe camera switches to a close-up shot and a line of text appears at the bottom of the screen: “GIVE YOUR SANDWICH SHELTER” A shot of the sandwich under the lid of the pan, where there is some water vapor condensation.", "events": [ { "event": "The video begins with the camera focusing on a stovetop knob and a close-up showing a hand turning the knob. The camera then switches to a pan, and a hand holds a slice of bread with cheese in it and places it in the pan. The hand picks up a lid from the right side of the frame and puts it over the pan.", "visual_elements": [ { "content": "The camera focuses on a stovetop knob.", "type": "camera", "weight": 3 }, { "content": "Close-up showing a hand turning a knob.", "type": "scene", "weight": 2 }, { "content": "The camera switches to a pan.", "type": "camera", "weight": 3 }, { "content": "A hand holds a slice of bread with cheese in it and places it in the pan.", "type": "action", "weight": 3 }, { "content": "A hand picks up a lid from the right side of the frame and places it over the pan.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to a close-up shot and a line of text appears at the bottom of the screen: “GIVE YOUR SANDWICH SHELTER” A shot of the sandwich under the lid of the pan, where there is some water vapor condensation.", "visual_elements": [ { "content": "The camera switches to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "A line of text appears at the bottom of the screen: “GIVE YOUR SANDWICH SHELTER”.", "type": "attribute", "weight": 1 }, { "content": "The camera takes a shot of the sandwich under the lid of the pan.", "type": "camera", "weight": 3 }, { "content": "There is some water vapor condensation on the lid of the pan.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0588", "video_path": "YouCook2/114/AZDUdr9G318/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 17.02, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a kitchen sink that holds a purple colander. A hand appears holding a stainless steel pot with a gold handle containing chunks of boiled potatoes. The hand pours the potato chunks from the pot into the purple colander.\nThe camera switches to a wooden countertop on which a large clear glass bowl rests. 
A hand appears again, holding a purple colander, and pours the potato chunks from the colander into the glass bowl, and then pours the butter chunks over the potato chunks in the glass bowl.", "events": [ { "event": "The video begins with the camera focusing on a kitchen sink that holds a purple colander. A hand appears holding a stainless steel pot with a gold handle containing chunks of boiled potatoes. The hand pours the potato chunks from the pot into the purple colander.", "visual_elements": [ { "content": "The camera focuses on a kitchen sink.", "type": "camera", "weight": 3 }, { "content": "A purple colander is placed in the sink.", "type": "scene", "weight": 2 }, { "content": "A hand appears in the frame.", "type": "scene", "weight": 2 }, { "content": "The hand is holding a stainless steel pot with a gold handle.", "type": "action", "weight": 2 }, { "content": "The pot contains boiled potatoes.", "type": "attribute", "weight": 2 }, { "content": "The hand pours the potatoes from the pot into the purple colander.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to a wooden countertop on which a large clear glass bowl rests. A hand appears again, holding a purple colander, and pours the potato chunks from the colander into the glass bowl, and then pours the butter chunks over the potato chunks in the glass bowl.", "visual_elements": [ { "content": "The camera switches to a wooden countertop.", "type": "camera", "weight": 3 }, { "content": "There is a large transparent glass bowl on the countertop.", "type": "scene", "weight": 2 }, { "content": "A hand appears again.", "type": "scene", "weight": 2 }, { "content": "The hand is holding a purple colander.", "type": "action", "weight": 2 }, { "content": "The hand holding a purple colander pours the potatoes from the colander into the glass bowl.", "type": "action", "weight": 2 }, { "content": "The hand then pours the butter over the potatoes in the glass bowl.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0589", "video_path": "YouCook2/114/DsKNKsXCLJw/split_5.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 11.05, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a stainless steel pot, and the camera switches to a white container containing water that is poured into the pot. The camera moves to the lower left corner and focuses on the stream of water and the potato chunks there. The stream of water continues to fill the pot, the water level gradually rises, and the potato chunks begin to float.", "events": [ { "event": "The video begins with the camera focusing on a stainless steel pot, and the camera switches to a white container containing water that is poured into the pot. The camera moves to the lower left corner and focuses on the stream of water and the potato chunks there. 
The stream of water continues to fill the pot, the water level gradually rises, and the potato chunks begin to float.", "visual_elements": [ { "content": "The camera focuses on a stainless steel pot.", "type": "camera", "weight": 3 }, { "content": "The pot is filled with chopped potatoes.", "type": "attribute", "weight": 2 }, { "content": "The camera switches to a white container.", "type": "camera", "weight": 3 }, { "content": "The container is filled with water, which is poured into the pot.", "type": "scene", "weight": 2 }, { "content": "The camera moves to the lower left corner.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the stream of water and the potato cubes there.", "type": "scene", "weight": 2 }, { "content": "The stream of water continues to pour into the pot.", "type": "attribute", "weight": 2 }, { "content": "The water level in the pot gradually rises.", "type": "attribute", "weight": 1 }, { "content": "The potato cubes begin to float.", "type": "attribute", "weight": 2 } ] } ], "n_events": 1, "n_elements": 9 }, { "index": "TUNA_0590", "video_path": "YouCook2/114/HbXjlFaPzOM/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 19.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on a metal colander, which is placed in a stainless steel sink. A pot containing cubed potatoes appears; the water and the potatoes are poured from the pot into the colander, and the potatoes give off steam.\nThe camera switches to an empty pot into which chunks of potatoes from the colander are poured.", "events": [ { "event": "At the beginning of the video, the camera focuses on a metal colander, which is placed in a stainless steel sink. A pot containing cubed potatoes appears; the water and the potatoes are poured from the pot into the colander, and the potatoes give off steam.", "visual_elements": [ { "content": "The camera focuses on a metal colander.", "type": "camera", "weight": 3 }, { "content": "The colander is placed in a stainless steel sink.", "type": "scene", "weight": 2 }, { "content": "A stainless steel pot contains cubed potatoes.", "type": "attribute", "weight": 2 }, { "content": "The water and the potatoes in the pot are poured into the colander.", "type": "action", "weight": 2 }, { "content": "The potatoes are steaming.", "type": "attribute", "weight": 1 }, { "content": "A pair of hands enters the frame, picks up the colander, and shakes it up and down to drain the water completely.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to an empty pot into which chunks of potatoes from the colander are poured.", "visual_elements": [ { "content": "The camera switches to an empty pot.", "type": "camera", "weight": 3 }, { "content": "The potato cubes in the colander are poured into the pot.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0591", "video_path": "YouCook2/115/B09O9Nfwm_s/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 19.0, "resolution": { "width": 854, "height": 468 }, "caption": "The video begins with the camera focusing on a wooden cutting board with several red hot dog sausages neatly arranged on it. In the upper left corner of the screen there is a hand holding a sharp knife cutting hot dogs, and neatly arranged on the board are hot dog sausages cut in half. 
A text is displayed in the upper right corner of the screen, “If making mini corn dogs cut hot dogs in half.” A hand picks up a bamboo skewer and inserts it into a hot dog sausage. In the upper right corner there is text, “Insert a skewer into each of the hot dogs”.\nThe scene switches to a cutting board where hot dogs with inserted bamboo skewers are lined up, and the hand inserts the remaining hot dogs with bamboo skewers and returns them to the cutting board.", "events": [ { "event": "The video begins with the camera focusing on a wooden cutting board with several red hot dog sausages neatly arranged on it. In the upper left corner of the screen there is a hand holding a sharp knife cutting hot dogs, and neatly arranged on the board are hot dog sausages cut in half. A text is displayed in the upper right corner of the screen, “If making mini corn dogs cut hot dogs in half.” A hand picks up a bamboo skewer and inserts it into a hot dog sausage. In the upper right corner there is text, “Insert a skewer into each of the hot dogs”.", "visual_elements": [ { "content": "The camera focuses on a wooden cutting board.", "type": "camera", "weight": 3 }, { "content": "There are several red hot dog sausages neatly arranged on the wooden cutting board.", "type": "scene", "weight": 3 }, { "content": "There is a pair of hands in the frame.", "type": "scene", "weight": 2 }, { "content": "The man's left hand is holding down the side of the hot dog and his right hand is holding a knife to cut the hot dog.", "type": "action", "weight": 2 }, { "content": "There are hot dog sausages neatly arranged on the board, cut in half.", "type": "attribute", "weight": 2 }, { "content": "In the upper right corner of the screen, there is a text that reads, “If making mini corn dogs cut hot dogs in half.”", "type": "attribute", "weight": 1 }, { "content": "A hand picks up a bamboo skewer and inserts it into a hot dog sausage.", "type": "action", "weight": 3 }, { "content": "In the upper right corner there is text: “Insert a skewer into each of the hot dogs”", "type": "attribute", "weight": 1 } ] }, { "event": "The scene switches to a cutting board where hot dogs with inserted bamboo skewers are lined up, and the hand inserts the remaining hot dogs with bamboo skewers and returns them to the cutting board.", "visual_elements": [ { "content": "The scene changes to a cutting board where there are hot dogs with skewers inserted.", "type": "camera", "weight": 3 }, { "content": "The hand returns the remaining hot dogs to the cutting board after inserting the skewers.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0592", "video_path": "YouCook2/115/B09O9Nfwm_s/split_5.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 19.0, "resolution": { "width": 854, "height": 468 }, "caption": "At the beginning of the video, the camera focuses on a skillet with oil sitting there on the stove. In the upper left corner of the frame there is the text prompt, “Coat a hot dog then place into oil.” A hand then appears in the upper right of the frame, holding a hot dog with a stick inserted into it covered in batter, and the hand places the hot dog into the pan. 
\nThe screen switches to the hand placing a second hot dog in the pan, and there is a text prompt in the upper left corner of the screen, “Cook for about 5 minutes or until golden brown.” As the hot dogs gradually turn golden brown and the bubbles in the oil increase, the hand adds a third hot dog to the pan.\nThe scene switches again and shows three hot dogs that have been cooked until golden brown. The hand lifts one hot dog out of the oil.", "events": [ { "event": "At the beginning of the video, the camera focuses on a skillet with oil sitting on the stove. In the upper left corner of the frame there is the text prompt, “Coat a hot dog then place into oil.” A hand then appears in the upper right of the frame, holding a hot dog with a stick inserted into it covered in batter, and the hand places the hot dog into the pan.", "visual_elements": [ { "content": "The camera focuses on a skillet with oil sitting on the stove.", "type": "camera", "weight": 3 }, { "content": "In the upper left corner of the screen there is a text prompt: “Coat a hot dog then place into oil.”", "type": "attribute", "weight": 1 }, { "content": "A hand appears in the upper right corner of the frame.", "type": "scene", "weight": 3 }, { "content": "The hand is holding a hot dog with a stick in it, covered in batter.", "type": "action", "weight": 2 }, { "content": "The hand places the hot dog into the pan.", "type": "action", "weight": 2 } ] }, { "event": "The screen switches to the hand placing a second hot dog in the pan, and there is a text prompt in the upper left corner of the screen, “Cook for about 5 minutes or until golden brown.” As the hot dogs gradually turn golden brown and the bubbles in the oil increase, the hand adds a third hot dog to the pan.", "visual_elements": [ { "content": "The scene changes to the hand placing a second hot dog into the pan.", "type": "camera", "weight": 3 }, { "content": "In the upper left corner of the screen, there is a text: “Cook for about 5 minutes or until golden brown.”", "type": "attribute", "weight": 1 }, { "content": "The hot dog turns golden brown.", "type": "attribute", "weight": 2 }, { "content": "There are more and more bubbles in the oil.", "type": "attribute", "weight": 2 }, { "content": "The hand adds a third hot dog to the pan.", "type": "action", "weight": 2 } ] }, { "event": "The scene switches again and shows three hot dogs that have been cooked until golden brown. 
The hand lifts one hot dog out of the oil.", "visual_elements": [ { "content": "The scene changes again.", "type": "camera", "weight": 3 }, { "content": "Three hot dogs are now golden brown.", "type": "attribute", "weight": 2 }, { "content": "The hand removes a hot dog from the oil.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0593", "video_path": "YouCook2/115/hnxHxJ1BKXA/split_7.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 16.06, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a stainless steel pan filled with hot oil, with tiny bubbles rising from the oil. One hand is shown holding a hot dog sausage and the hand eases this piece of food into the hot oil, which immediately floods the surface with a large number of bubbles.\nThe scene switches to the hand placing another batter-covered piece of hot dog into the oil, the hot dog placed first taking on a golden color. The hand picks up a handful of metal clips and holds one of the hot dog sausages in turn to flip it over.", "events": [ { "event": "The video begins with the camera focusing on a stainless steel pan filled with hot oil, with tiny bubbles rising from the oil. One hand is shown holding a hot dog sausage and the hand eases this piece of food into the hot oil, which immediately floods the surface with a large number of bubbles.", "visual_elements": [ { "content": "The camera focuses on a stainless steel pan.", "type": "camera", "weight": 3 }, { "content": "The pan is filled with hot oil.", "type": "scene", "weight": 2 }, { "content": "There are tiny bubbles on the surface of the oil.", "type": "attribute", "weight": 2 }, { "content": "A hand holds a hot dog sausage.", "type": "scene", "weight": 3 }, { "content": "The hand slowly drops the piece of food into the hot oil.", "type": "action", "weight": 2 }, { "content": "A large number of bubbles immediately form on the surface of the oil.", "type": "attribute", "weight": 2 } ] }, { "event": "The scene switches to the hand placing another batter-covered piece of hot dog into the oil, the hot dog placed first taking on a golden color. The hand picks up a handful of metal clips and holds one of the hot dog sausages in turn to flip it over.", "visual_elements": [ { "content": "The scene changes to the hand placing another battered hot dog into the oil.", "type": "camera", "weight": 3 }, { "content": "The first hot dog is golden brown in color.", "type": "attribute", "weight": 2 }, { "content": "The hand picks up a metal clip", "type": "action", "weight": 3 }, { "content": "The hand picks up a metal clamp and turns the hot dog sausage over by holding them one by one.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0594", "video_path": "YouCook2/115/jY6HcQVqLgw/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 7.04, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a silver metal bowl containing a yellow liquid with black pepper sprinkled on top of the liquid. In the background is a light gray countertop. A hand then appears in the frame holding a white cup with a white liquid inside. 
The hand pours the liquid from the cup into the bowl, and a white foam appears on the surface of the liquid in the bowl.\nThe scene switches and a hand again appears holding a white measuring cup containing an orange-colored liquid. The hand pours the liquid into the bowl and the liquid mixes with the other liquids in the bowl.", "events": [ { "event": "The video begins with the camera focusing on a silver metal bowl containing a yellow liquid with black pepper sprinkled on top of the liquid. In the background is a light gray countertop. A hand then appears in the frame holding a white cup with a white liquid inside. The hand pours the liquid from the cup into the bowl, and a white foam appears on the surface of the liquid in the bowl.", "visual_elements": [ { "content": "The camera focuses on a silver metal bowl.", "type": "camera", "weight": 3 }, { "content": "Inside the bowl is a yellow liquid.", "type": "scene", "weight": 2 }, { "content": "There is black pepper sprinkled on top of the liquid.", "type": "attribute", "weight": 2 }, { "content": "In the background is a light gray countertop.", "type": "scene", "weight": 2 }, { "content": "A hand appears in the frame.", "type": "scene", "weight": 3 }, { "content": "The hand is holding a white cup.", "type": "action", "weight": 3 }, { "content": "The cup contains a white liquid.", "type": "attribute", "weight": 2 }, { "content": "The hand pours the liquid from the cup into the bowl.", "type": "action", "weight": 3 }, { "content": "White bubbles appear on the surface of the liquid in the bowl.", "type": "attribute", "weight": 1 } ] }, { "event": "The scene switches and a hand again appears holding a white measuring cup containing an orange-colored liquid. The hand pours the liquid into the bowl and the liquid mixes with the other liquids in the bowl.", "visual_elements": [ { "content": "The scene changes.", "type": "camera", "weight": 3 }, { "content": "The hand appears again.", "type": "scene", "weight": 3 }, { "content": "The hand is holding a white measuring cup.", "type": "action", "weight": 2 }, { "content": "The measuring cup contains an orange-colored liquid.", "type": "attribute", "weight": 2 }, { "content": "The hand pours the liquid into the bowl.", "type": "action", "weight": 3 }, { "content": "The liquid mixes with the other liquids in the bowl.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0595", "video_path": "YouCook2/116/D-Dkz8cX3Vg/split_4.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 9.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a baking sheet lined with baking paper, on top of which sits a round piece of pizza dough, which is evenly sprinkled with grated cheese. A person's hand appears and covers the cheese with another piece of dough. Fingers gently press the edges of the dough, sealing the top and bottom layers together tightly.\nNext, the camera switches to look down on the entire baking sheet and the dough is shown to have completely covered the cheese.", "events": [ { "event": "The video begins with the camera focusing on a baking sheet lined with baking paper, on top of which sits a round piece of pizza dough, which is evenly sprinkled with grated cheese. A person's hand appears and covers the cheese with another piece of dough. 
Fingers gently press the edges of the dough, sealing the top and bottom layers together tightly.", "visual_elements": [ { "content": "The camera focuses on a baking sheet lined with baking paper.", "type": "camera", "weight": 3 }, { "content": "There is a round piece of pizza dough on top of the baking sheet.", "type": "scene", "weight": 3 }, { "content": "There is a round piece of pizza dough evenly sprinkled with grated cheese.", "type": "attribute", "weight": 2 }, { "content": "A man's hand is seen covering the cheese with another piece of dough.", "type": "scene", "weight": 3 }, { "content": "Fingers gently press the edges of the dough.", "type": "action", "weight": 3 }, { "content": "The top and bottom layers of dough are tightly sealed together.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to look down on the entire baking sheet and the dough is shown to have completely covered the cheese.", "visual_elements": [ { "content": "The camera switches to an overhead view of the entire baking sheet.", "type": "camera", "weight": 3 }, { "content": "The cheese is now completely covered by the dough.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0596", "video_path": "YouCook2/117/8ZX3Lazhkp4/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 12.05, "resolution": { "width": 640, "height": 480 }, "caption": "The video begins with the camera focusing on a stainless steel pot containing hot water. At the bottom of the screen is a line of text: “2″ of water”. A hand appears holding a white spoon and sprinkles some white powdery substance into the pot, and the water begins to fluctuate and bubble.\nThe screen switches and a wooden spoon appears in the frame and the hand stirs the water in the pot with the wooden spoon.", "events": [ { "event": "The video begins with the camera focusing on a stainless steel pot containing hot water. At the bottom of the screen is a line of text: “2″ of water”. A hand appears holding a white spoon and sprinkles some white powdery substance into the pot, and the water begins to fluctuate and bubble.", "visual_elements": [ { "content": "The camera focuses on a stainless steel pot filled with hot water.", "type": "camera", "weight": 3 }, { "content": "At the bottom of the screen is a line that reads: “2″ of water.”", "type": "scene", "weight": 2 }, { "content": "A hand appears in the frame.", "type": "scene", "weight": 2 }, { "content": "The hand holds a white spoon and sprinkles some white powder into the pot.", "type": "action", "weight": 2 }, { "content": "The water surface begins to fluctuate and bubbles appear.", "type": "attribute", "weight": 2 } ] }, { "event": "The screen switches and a wooden spoon appears in the frame and the hand stirs the water in the pot with the wooden spoon.", "visual_elements": [ { "content": "The scene changes.", "type": "camera", "weight": 3 }, { "content": "A wooden spoon appears.", "type": "scene", "weight": 3 }, { "content": "The hand stirs the water with the wooden spoon.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0597", "video_path": "YouCook2/117/jRdA68GbUMg/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 9.05, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a white pan that is empty. 
Then, a clear glass bowl containing four egg yolks appears in the upper right corner of the frame, and the yolks fall into the pot one by one. A hand appears in the frame holding a whisk and starts stirring the yolks.\nThe scene switches to four small glass bowls on the tabletop, each containing a different condiment, and a hand reaches out from the right side of the scene and picks up one of the bowls containing liquid.\nThe scene switches to a white pan in which the egg yolks have been whisked very well, and a hand is shown holding the bowl containing the liquid.", "events": [ { "event": "The video begins with the camera focusing on a white pan that is empty. Then, a clear glass bowl containing four egg yolks appears in the upper right corner of the frame, and the yolks fall into the pot one by one. A hand appears in the frame holding a whisk and starts stirring the yolks.", "visual_elements": [ { "content": "The camera focuses on a white pan.", "type": "camera", "weight": 3 }, { "content": "The pot is empty.", "type": "attribute", "weight": 2 }, { "content": "A clear glass bowl containing four egg yolks appears there in the upper right corner of the screen.", "type": "scene", "weight": 3 }, { "content": "One by one, the yolks fall into the pan.", "type": "scene", "weight": 2 }, { "content": "A hand holding a whisk appears and starts stirring the yolks.", "type": "scene", "weight": 2 } ] }, { "event": "The scene switches to four small glass bowls on the tabletop, each containing a different condiment, and a hand reaches out from the right side of the scene and picks up one of the bowls containing liquid.", "visual_elements": [ { "content": "The scene changes to four small glass bowls on the table there.", "type": "camera", "weight": 3 }, { "content": "Each bowl contains a different seasoning.", "type": "scene", "weight": 2 }, { "content": "A hand reaches out from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "This hand picks up one of the bowls containing liquid.", "type": "action", "weight": 3 } ] }, { "event": "The scene switches to a white pan in which the egg yolks have been whisked very well, and a hand is shown holding the bowl containing the liquid.", "visual_elements": [ { "content": "The scene changes to a white pan.", "type": "camera", "weight": 3 }, { "content": "The egg yolks have been whisked very well", "type": "scene", "weight": 2 }, { "content": "A hand holds the bowl of liquid.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0598", "video_path": "YouCook2/117/zZysbuDEtWY/split_0.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 6.0, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on an empty stainless steel pan, which is placed in the sink. Then the faucet is turned on and fresh water flows out of the faucet into the pot. The camera switches to a new scene in which the water in the pot bubbles and emits heat.\nThe camera switches to the knobs of the stove, and the hand turns the knobs to the off position, and the indicator lights all go out.", "events": [ { "event": "The video begins with the camera focusing on an empty stainless steel pan, which is placed in the sink. Then the faucet is turned on and fresh water flows out of the faucet into the pot. 
The camera switches to a new scene in which the water in the pot bubbles and emits heat.", "visual_elements": [ { "content": "The camera focuses on an empty stainless steel pan.", "type": "camera", "weight": 3 }, { "content": "The pan is placed in the sink.", "type": "scene", "weight": 3 }, { "content": "The faucet is turned on.", "type": "scene", "weight": 1 }, { "content": "Fresh water flows out of the faucet into the pot.", "type": "scene", "weight": 3 }, { "content": "The camera switches to a new scene.", "type": "camera", "weight": 3 }, { "content": "The water in the pot is bubbling and radiating heat.", "type": "scene", "weight": 3 } ] }, { "event": "The camera switches to the knobs of the stove, and the hand turns the knobs to the off position, and the indicator lights all go out.", "visual_elements": [ { "content": "The camera switches to the knob of the stove.", "type": "camera", "weight": 3 }, { "content": "The hand turns the knob to the off position.", "type": "action", "weight": 2 }, { "content": "All the lights go out.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0599", "video_path": "YouCook2/119/5_mNozo4yqM/split_6.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 19.02, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a metal bowl. Inside the bowl, there is minced red beef with black pepper and salt sprinkled on the surface. The background is a wooden cutting board. Then, the beaten yellow egg liquid is poured into the bowl. Subsequently, chopped green herbs are sprinkled into the bowl.\nThe camera then switches to show a wooden cutting board. On the board is a piece of yellow hard cheese and a metal grater. Subsequently, a right hand reaches into the frame and takes away the cheese block and the grater one after another.\nNext, the camera switches to the grated cheese. The grated cheese is piled on a white plate.\nThen, the camera switches back to the metal bowl again. The scene shows the mixture of beef, egg liquid and herbs inside the bowl. Subsequently, the grated cheese is evenly sprinkled into the bowl.", "events": [ { "event": "At the beginning of the video, the camera is focused on a metal bowl. Inside the bowl, there is minced red beef with black pepper and salt sprinkled on the surface. The background is a wooden cutting board. Then, the beaten yellow egg liquid is poured into the bowl. Subsequently, chopped green herbs are sprinkled into the bowl.", "visual_elements": [ { "content": "The camera is focused on a metal bowl.", "type": "camera", "weight": 3 }, { "content": "Inside the bowl, there is minced red beef.", "type": "attribute", "weight": 3 }, { "content": "There is minced red beef with black pepper and salt sprinkled on the surface.", "type": "attribute", "weight": 2 }, { "content": "The background is a wooden cutting board.", "type": "scene", "weight": 1 }, { "content": "The beaten yellow egg liquid is poured into the bowl.", "type": "attribute", "weight": 2 }, { "content": "The chopped green herbs are sprinkled into the bowl.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera then switches to show a wooden cutting board. On the board is a piece of yellow hard cheese and a metal grater. 
Subsequently, a right hand reaches into the frame and takes away the cheese block and the grater one after another.", "visual_elements": [ { "content": "The camera then switches to show a wooden cutting board.", "type": "camera", "weight": 3 }, { "content": "On the board is a piece of yellow hard cheese and a metal grater.", "type": "attribute", "weight": 3 }, { "content": "A right hand reaches into the frame.", "type": "action", "weight": 2 }, { "content": "This hand takes away the cheese block and the grater one after another.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to the grated cheese. The grated cheese is piled on a white plate.", "visual_elements": [ { "content": "The camera switches to the grated cheese.", "type": "camera", "weight": 3 }, { "content": "The grated cheese is piled on a white plate.", "type": "attribute", "weight": 3 } ] }, { "event": "Then, the camera switches back to the metal bowl again. The scene shows the mixture of beef, egg liquid and herbs inside the bowl. Subsequently, the grated cheese is evenly sprinkled into the bowl.", "visual_elements": [ { "content": "The camera switches back to the metal bowl again.", "type": "camera", "weight": 3 }, { "content": "The scene shows the mixture of beef, egg liquid and herbs inside the bowl.", "type": "attribute", "weight": 2 }, { "content": "The grated cheese is evenly sprinkled into the bowl.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 15 }, { "index": "TUNA_0600", "video_path": "YouCook2/119/BNP9sssEkdU/split_6.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 7.04, "resolution": { "width": 640, "height": 480 }, "caption": "At the beginning of the video, the camera is aimed at a baking tray lined with aluminum foil. A raw meat patty is placed on the baking tray. The meat patty is rectangular in shape. There are some red and green vegetable particles on the surface of the meat. A hand holding a metal spoon appears in the frame and the spoon is spreading sauce on the surface of the meat patty. The wooden tabletop can be seen in the background.\nNext, a line of text description appears at the bottom of the screen: \"Bake at 325°F for 70 min.\", and the baking tray is carried away.\nSubsequently, the camera switches to a new scene, showing the interior of the oven. The baking tray is placed into the oven.", "events": [ { "event": "At the beginning of the video, the camera is aimed at a baking tray lined with aluminum foil. A raw meat patty is placed on the baking tray. The meat patty is rectangular in shape. There are some red and green vegetable particles on the surface of the meat. A hand holding a metal spoon appears in the frame and the spoon is spreading sauce on the surface of the meat patty. 
The wooden tabletop can be seen in the background.", "visual_elements": [ { "content": "The camera is aimed at a baking tray lined with aluminum foil.", "type": "camera", "weight": 3 }, { "content": "A raw meat patty is placed on the baking tray.", "type": "attribute", "weight": 3 }, { "content": "The meat patty is rectangular in shape.", "type": "attribute", "weight": 3 }, { "content": "There is some red and green vegetable particles on the surface of the meat.", "type": "attribute", "weight": 2 }, { "content": "A hand holding a metal spoon appears in the frame.", "type": "attribute", "weight": 2 }, { "content": "The spoon is spreading sauce on the surface of the meat patty.", "type": "action", "weight": 3 }, { "content": "The background is the wooden tabletop.", "type": "scene", "weight": 1 } ] }, { "event": "Next, a line of text description appears at the bottom of the screen: \"Bake at 325°F for 70 min.\", and the baking tray is carried away.", "visual_elements": [ { "content": "A line of text description appears at the bottom of the screen: \"Bake at 325°F for 70 min.\"", "type": "scene", "weight": 2 }, { "content": "The baking tray is carried away.", "type": "action", "weight": 2 } ] }, { "event": "Subsequently, the camera switches to a new scene, showing the interior of the oven. The baking tray is placed into the oven.", "visual_elements": [ { "content": "The camera switches to a new scene, showing the interior of the oven.", "type": "camera", "weight": 3 }, { "content": "The baking tray is placed into the oven.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0601", "video_path": "YouCook2/119/GAKI57aE3Yo/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 18.05, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a stainless steel pot. There are chopped onions and green peppers in the pot. A wooden spoon is stirring these vegetables inside the pot. The background is a black stove.\nNext, the camera switches to show that the vegetables in the pot have already been stir-fried and changed color. The wooden spoon continues to stir-fry and then is placed horizontally above the pot. Subsequently, a hand picks up two seasoning bottles one after another. One seasoning bottle contains red seasoning, and the other contains dried green coriander. The seasonings are sprinkled on the onions and green peppers successively.", "events": [ { "event": "At the beginning of the video, the camera is focused on a stainless steel pot. There are chopped onions and green peppers in the pot. A wooden spoon is stirring these vegetables inside the pot. The background is a black stove.", "visual_elements": [ { "content": "The camera is focused on a stainless steel pot.", "type": "camera", "weight": 3 }, { "content": "There are chopped onions and green peppers in the pot.", "type": "attribute", "weight": 3 }, { "content": "A wooden spoon is stirring these vegetables inside the pot.", "type": "action", "weight": 3 }, { "content": "The background is a black stove.", "type": "scene", "weight": 2 } ] }, { "event": "Next, the camera switches to show that the vegetables in the pot have already been stir-fried and changed color. The wooden spoon continues to stir-fry and then is placed horizontally above the pot. Subsequently, a hand picks up two seasoning bottles one after another. 
One seasoning bottle contains red seasoning, and the other contains dried green coriander. The seasonings are sprinkled on the onions and green peppers successively.", "visual_elements": [ { "content": "The camera switches.", "type": "camera", "weight": 3 }, { "content": "It shows that the vegetables in the pot have already been stir-fried and changed color.", "type": "attribute", "weight": 2 }, { "content": "The wooden spoon continues to stir-fry.", "type": "action", "weight": 2 }, { "content": "The wooden spoon is placed horizontally above the pot.", "type": "action", "weight": 2 }, { "content": "A hand picks up two seasoning bottles one after another.", "type": "action", "weight": 2 }, { "content": "One seasoning bottle contains red seasoning.", "type": "attribute", "weight": 2 }, { "content": "The other contains dried green coriander.", "type": "attribute", "weight": 2 }, { "content": "The seasonings are sprinkled on the onions and green peppers successively.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0602", "video_path": "YouCook2/119/GAKI57aE3Yo/split_9.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 26.06, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is aimed at a kitchen countertop. There is a transparent glass baking pan placed on a wooden cutting board, and inside the pan is a baked meat patty. In the background are some spice bottles, a tissue holder and a large loaf of bread on the grill.\nNext, a pair of hands appears in the frame. The left hand is holding a small bowl with red sauce in it. The right hand is holding a white rubber spatula, and the spatula repeatedly scoops up the sauce in the bowl to spread it on the meat patty.\nThen, the camera switches, and the view changes from a four-grid layout back to a full-screen view. In the center of the screen is a square glass plate with the meat patty inside. There is a left hand pouring the red sauce from the bowl onto the meat patty, and the right hand is wearing white gloves and holding a whisk. Finally, the right hand uses the whisk to evenly distribute the sauce over the meat patty.\nSubsequently, the camera switches again. The already baked meat patty has the red sauce evenly distributed on it, and the oil around the meat patty is still bubbling hot.", "events": [ { "event": "At the beginning of the video, the camera is aimed at a kitchen countertop. There is a transparent glass baking pan placed on a wooden cutting board, and inside the pan is a baked meat patty. In the background are some spice bottles, a tissue holder and a large loaf of bread on the grill.", "visual_elements": [ { "content": "The camera is aimed at a kitchen countertop.", "type": "camera", "weight": 3 }, { "content": "There is a transparent glass baking pan placed on a wooden cutting board.", "type": "attribute", "weight": 2 }, { "content": "Inside the pan is a baked meat patty.", "type": "attribute", "weight": 3 }, { "content": "In the background are some spice bottles, a tissue holder and a large loaf of bread on the grill.", "type": "scene", "weight": 2 } ] }, { "event": "Next, a pair of hands appears in the frame. The left hand is holding a small bowl with red sauce in it. 
The right hand is holding a white rubber spatula, and the spatula repeatedly scoops up the sauce in the bowl to spread it on the meat patty.", "visual_elements": [ { "content": "A pair of hands appears in the frame.", "type": "attribute", "weight": 1 }, { "content": "The left hand is holding a small bowl.", "type": "attribute", "weight": 2 }, { "content": "There is red sauce in the bowl.", "type": "attribute", "weight": 2 }, { "content": "The right hand is holding a white rubber spatula.", "type": "attribute", "weight": 2 }, { "content": "The spatula repeatedly scoops up the sauce in the bowl to spread it on the meat patty.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera switches, and the view changes from a four-grid layout back to a full-screen view. In the center of the screen is a square glass plate with the meat patty inside. There is a left hand pouring the red sauce from the bowl onto the meat patty, and the right hand is wearing white gloves and holding a whisk. Finally, the right hand uses the whisk to evenly distribute the sauce over the meat patty.", "visual_elements": [ { "content": "The camera switches.", "type": "camera", "weight": 3 }, { "content": "The view changes from a four-grid layout back to a full-screen view.", "type": "camera", "weight": 3 }, { "content": "In the center of the screen is a square glass plate.", "type": "attribute", "weight": 2 }, { "content": "In the plate is the meat patty.", "type": "attribute", "weight": 2 }, { "content": "There is a left hand pouring the red sauce from the bowl onto the meat patty.", "type": "action", "weight": 3 }, { "content": "The right hand is wearing white gloves and holding a whisk.", "type": "attribute", "weight": 2 }, { "content": "The right hand uses the whisk to evenly distribute the sauce over the meat patty.", "type": "action", "weight": 3 } ] }, { "event": "Subsequently, the camera switches again. The already baked meat patty has the red sauce evenly distributed on it, and the oil around the meat patty is still bubbling hot.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "The already baked meat patty has the red sauce evenly distributed on it.", "type": "attribute", "weight": 3 }, { "content": "The oil around the meat patty is still bubbling hot.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0603", "video_path": "YouCook2/119/jgeZjmXCDiw/split_12.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 5.04, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a kitchen countertop. The background shows brown cabinets and a stainless steel sink. A woman wearing a white sweater and an apron is standing in front of the countertop. The words \"Mangia E Tatti Zitto!\" are written on her apron. She is holding a grey baking tray with both hands. There is a piece of baking paper laid on the tray, and on the paper is a seasoned meat roll.\nThe camera zooms in downward, and it can be seen that the meat roll is mixed with green herbs and other seasonings.\nNext, the camera pulls back slightly and moves upward. The woman turns around and walks towards the right side with the baking tray in the hands.\nThe camera switches to a close-up shot and rotates to the right. It shows the baked meat roll on the baking tray. The color of the meat roll has turned to a deep red. 
The white cheese inside has puffed up, and some grease is oozing out from both sides.", "events": [ { "event": "At the beginning of the video, the camera is focused on a kitchen countertop. The background shows brown cabinets and a stainless steel sink. A woman wearing a white sweater and an apron is standing in front of the countertop. The words \"Mangia E Tatti Zitto!\" are written on her apron. She is holding a grey baking tray with both hands. There is a piece of baking paper laid on the tray, and on the paper is a seasoned meat roll.", "visual_elements": [ { "content": "The camera is focused on a kitchen countertop.", "type": "camera", "weight": 3 }, { "content": "The background shows brown cabinets and a stainless steel sink.", "type": "scene", "weight": 1 }, { "content": "A woman wearing a white sweater and an apron is standing in front of the countertop.", "type": "attribute", "weight": 3 }, { "content": "The words \"Mangia E Tatti Zitto!\" are written on the apron.", "type": "attribute", "weight": 2 }, { "content": "This woman is holding a grey baking tray with both hands.", "type": "attribute", "weight": 2 }, { "content": "There is a piece of baking paper laid on the tray.", "type": "attribute", "weight": 2 }, { "content": "On the paper is a seasoned meat roll.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera zooms in downward, and it can be seen that the meat roll is mixed with green herbs and other seasonings.", "visual_elements": [ { "content": "The camera zooms in downward.", "type": "camera", "weight": 3 }, { "content": "It can be seen that the meat roll is mixed with green herbs and other seasonings.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera pulls back slightly and moves upward. The woman turns around and walks towards the right side with the baking tray in the hands.", "visual_elements": [ { "content": "The camera pulls back slightly and moves upward.", "type": "camera", "weight": 3 }, { "content": "The woman turns around and walks towards the right side with the baking tray in the hands.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to a close-up shot and rotates to the right. It shows the baked meat roll on the baking tray. The color of the meat roll has turned to a deep red. The white cheese inside has puffed up, and some grease is oozing out from both sides.", "visual_elements": [ { "content": "The camera switches to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "The camera rotates to the right.", "type": "camera", "weight": 3 }, { "content": "It shows the baked meat roll on the baking tray.", "type": "attribute", "weight": 3 }, { "content": "The color of the meat roll has turned to a deep red.", "type": "attribute", "weight": 2 }, { "content": "The white cheese inside has puffed up.", "type": "attribute", "weight": 2 }, { "content": "Some grease is oozing out from both sides.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 17 }, { "index": "TUNA_0604", "video_path": "YouCook2/120/GCwrUPndL4g/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 9.04, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, a close-up shot of a metal grater is shown on the left side of the screen. The background is a wooden cutting board. On the right side of the screen, there is a pile of grated onion shreds. 
Onion shreds are light yellow in color.\nThen, the screen pans to the right, and the camera returns to a close-up of the grater. A text prompt \"grate 1 medium onion\" appears in the lower right corner of the screen. Then a hand holds an onion and moves it back and forth on the grater, and the onion gradually turns into fine onion shreds.\nNext, the camera switches to a close-up shot of a strainer. The strainer is filled with the grated onion shreds. The hand appears again and places the freshly grated white onion shreds on it.", "events": [ { "event": "At the beginning of the video, a close-up shot of a metal grater is shown on the left side of the screen. The background is a wooden cutting board. On the right side of the screen, there is a pile of grated onion shreds. Onion shreds are light yellow in color.", "visual_elements": [ { "content": "A close-up shot of a metal grater is shown on the left side of the screen.", "type": "camera", "weight": 3 }, { "content": "The background is a wooden cutting board.", "type": "scene", "weight": 1 }, { "content": "On the right side of the screen, there is a pile of grated onion shreds.", "type": "attribute", "weight": 2 }, { "content": "Onion shreds are light yellow in color.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the screen pans to the right, and the camera returns to a close-up of the grater. A text prompt \"grate 1 medium onion\" appears in the lower right corner of the screen. Then a hand holds an onion and moves it back and forth on the grater, and the onion gradually turns into fine onion shreds.", "visual_elements": [ { "content": "The screen pans to the right.", "type": "camera", "weight": 3 }, { "content": "The camera returns to a close-up of the grater.", "type": "camera", "weight": 3 }, { "content": "A text prompt \"grate 1 medium onion\" appears in the lower right corner of the screen.", "type": "attribute", "weight": 2 }, { "content": "A hand holds an onion and moves it back and forth on the grater.", "type": "action", "weight": 3 }, { "content": "The onion gradually turns into fine onion shreds.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera switches to a close-up shot of a strainer. The strainer is filled with the grated onion shreds. The hand appears again and places the freshly grated white onion shreds on it.", "visual_elements": [ { "content": "The camera switches to a close-up shot of a strainer.", "type": "camera", "weight": 3 }, { "content": "The strainer is filled with the grated onion shreds.", "type": "attribute", "weight": 2 }, { "content": "The hand appears again and places the freshly grated white onion shreds on it.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0605", "video_path": "YouCook2/120/GCwrUPndL4g/split_6.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 10.04, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a transparent glass bowl which contains some ingredients. The ingredients include chopped potatoes, flour, black pepper powder and eggs. The shot is taken from a top-down view. The background is a wooden tabletop.\nNext, some seasonings are sprinkled into the bowl. The subtitle shows that the added seasonings are \"1/2 tsp garlic powder (optional)\".\nThen, the camera switches to a slightly side angle. 
The scene shows a white rubber spatula starting to stir and press the ingredients in the bowl. The stirring makes them evenly mixed. The Instagram icon and text description appear in the lower left corner of the screen, prompting the viewers to follow @thecooknshare on Instagram.", "events": [ { "event": "At the beginning of the video, the camera is focused on a transparent glass bowl which contains some ingredients. The ingredients include chopped potatoes, flour, black pepper powder and eggs. The shot is taken from a top-down view. The background is a wooden tabletop.", "visual_elements": [ { "content": "The camera is focused on a transparent glass bowl.", "type": "camera", "weight": 3 }, { "content": "The bowl contains some ingredients.", "type": "attribute", "weight": 2 }, { "content": "The ingredients include chopped potatoes, flour, black pepper powder and eggs.", "type": "attribute", "weight": 2 }, { "content": "The shot is taken from a top-down view.", "type": "camera", "weight": 3 }, { "content": "The background is a wooden tabletop.", "type": "scene", "weight": 1 } ] }, { "event": "Next, some seasonings are sprinkled into the bowl. The subtitle shows that the added seasonings are \"1/2 tsp garlic powder (optional)\".", "visual_elements": [ { "content": "Some seasonings are sprinkled into the bowl.", "type": "action", "weight": 2 }, { "content": "The subtitle shows that the added seasonings are \"1/2 tsp garlic powder (optional)\".", "type": "attribute", "weight": 1 } ] }, { "event": "Then, the camera switches to a slightly side angle. The scene shows a white rubber spatula starting to stir and press the ingredients in the bowl. The stirring makes them evenly mixed. The Instagram icon and text description appear in the lower left corner of the screen, prompting the viewers to follow @thecooknshare on Instagram.", "visual_elements": [ { "content": "The camera switches to a slightly side angle.", "type": "camera", "weight": 3 }, { "content": "The scene shows a white rubber spatula starting to stir and press the ingredients in the bowl.", "type": "action", "weight": 3 }, { "content": "The stirring makes the ingredients evenly mixed.", "type": "action", "weight": 2 }, { "content": "The Instagram icon and text description appear in the lower left corner of the screen.", "type": "attribute", "weight": 1 }, { "content": "The icon and text description prompts the viewers to follow @thecooknshare on Instagram.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0606", "video_path": "YouCook2/120/a6Axxe-Q9K0/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 20.05, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the start of the video, the camera zooms in on a heating frying pan. A person uses a spoon to put a small piece of butter into the pan, and the spoon presses and stirs the butter to make it melt and bubble. Then, the person's right hand puts some chopped ingredients onto the already melted butter in the pan.\nNext, the camera switches to some chopped light yellow ingredients placed on a person's left hand, and the right hand is adjusting the shape of the ingredients with fingers. The close-up shot of the ingredients in the hand is shown, and then the camera switches as the right hand puts these ingredients into the pan.\nThen, the camera zooms in again and switches to this pan. The butter has already melted and is bubbling with white foam. 
This time, the person's right hand picks up the same ingredients again and continues to put them into the pan. A part of the kitchen, including the stove and some kitchen utensils, can be seen in the background.\nFinally, the camera switches again, and the picture shows that there are already five piles of ingredients in the pan.", "events": [ { "event": "At the start of the video, the camera zooms in on a heating frying pan. A person uses a spoon to put a small piece of butter into the pan, and the spoon presses and stirs the butter to make it melt and bubble. Then, the person's right hand puts some chopped ingredients onto the already melted butter in the pan.", "visual_elements": [ { "content": "The camera zooms in on a heating frying pan.", "type": "camera", "weight": 3 }, { "content": "A person uses a spoon to put a small piece of butter into the pan.", "type": "action", "weight": 3 }, { "content": "The spoon presses and stirs the butter to make it melt and bubble.", "type": "action", "weight": 2 }, { "content": "The person's right hand puts some chopped ingredients onto the already melted butter in the pan.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to some chopped light yellow ingredients placed on a person's left hand, and the right hand is adjusting the shape of the ingredients with fingers. The close-up shot of the ingredients in the hand is shown, and then the camera switches as the right hand puts these ingredients into the pan.", "visual_elements": [ { "content": "The camera switches to some chopped light yellow ingredients placed on a person's left hand.", "type": "camera", "weight": 3 }, { "content": "The right hand is adjusting the shape of the ingredients with fingers.", "type": "action", "weight": 2 }, { "content": "The close-up shot of the ingredients in the hand is shown.", "type": "camera", "weight": 3 }, { "content": "The camera switches.", "type": "camera", "weight": 3 }, { "content": "The right hand puts these ingredients into the pan.", "type": "action", "weight": 2 } ] }, { "event": "Then, the camera zooms in again and switches to this pan. The butter has already melted and is bubbling with white foam. This time, the person's right hand picks up the same ingredients again and continues to put them into the pan. 
A part of the kitchen, including the stove and some kitchen utensils, can be seen in the background.", "visual_elements": [ { "content": "The camera zooms in again and switches to this pan.", "type": "camera", "weight": 3 }, { "content": "The butter has already melted and is bubbling with white foam.", "type": "attribute", "weight": 2 }, { "content": "This time, the person's right hand picks up the same ingredients again", "type": "action", "weight": 2 }, { "content": "The hand continues to put them into the pan.", "type": "action", "weight": 2 }, { "content": "A part of the kitchen, including the stove and some kitchen utensils, can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera switches again, and the picture shows that there are already five piles of ingredients in the pan.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "The picture shows that there are already five piles of ingredients in the pan.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 16 }, { "index": "TUNA_0607", "video_path": "YouCook2/120/k-aTILi_nLY/split_6.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 24.06, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a metal bowl filled with shredded potatoes. The potato shreds are light yellow in color, with some having a slight brownish tint. The camera mainly focuses on the potato shreds inside the bowl. Then, the subtitle \"1/4 tsp onion powder\" appears on the screen, and at the same time, onion powder is sprinkled into the bowl and is light yellow in color. Subsequently, the subtitle changes to \"1/4 tsp salt\", and white salt is sprinkled into the bowl.\nNext, the lighting in the scene gets darker, and the subtitle shows \"1 Tbsp potato powder\". Potato powder, which is off-white in color, can be seen being sprinkled into the bowl, covering the potato shreds and other seasonings. Then, the subtitle changes to \"1 Tbsp corn starch\", and another spoonful of pure white corn starch is sprinkled into the bowl, continuing to cover the previous mixture.\nThen, the subtitle shows \"2 Tbsp rice flour\". Then the camera switches, and the lighting in the scene changes to a warm tone, and several spoonfuls of rice flour are sprinkled into the bowl. Then, the subtitle shows \"1 Tbsp water\", and a spoonful of transparent water is poured into the bowl. Finally, a mixer appears in the scene, starting to stir the mixture inside the bowl. The potato shreds, powders, salt, corn starch and water gradually mix evenly, and the bowl shakes slightly with the stirring action.", "events": [ { "event": "At the beginning of the video, the camera is focused on a metal bowl filled with shredded potatoes. The potato shreds are light yellow in color, with some having a slight brownish tint. The camera mainly focuses on the potato shreds inside the bowl. Then, the subtitle \"1/4 tsp onion powder\" appears on the screen, and at the same time, onion powder is sprinkled into the bowl and is light yellow in color. 
Subsequently, the subtitle changes to \"1/4 tsp salt\", and white salt is sprinkled into the bowl.", "visual_elements": [ { "content": "The camera is focused on a metal bowl filled with shredded potatoes.", "type": "camera", "weight": 3 }, { "content": "The potato shreds are light yellow in color.", "type": "attribute", "weight": 2 }, { "content": "Some potato shreds have a slight brownish tint.", "type": "attribute", "weight": 1 }, { "content": "The camera mainly focuses on the potato shreds inside the bowl.", "type": "camera", "weight": 3 }, { "content": "The subtitle \"1/4 tsp onion powder\" appears on the screen.", "type": "attribute", "weight": 1 }, { "content": "Onion powder is sprinkled into the bowl.", "type": "action", "weight": 3 }, { "content": "The onion powder is light yellow in color.", "type": "attribute", "weight": 2 }, { "content": "The subtitle changes to \"1/4 tsp salt\".", "type": "attribute", "weight": 1 }, { "content": "White salt is sprinkled into the bowl.", "type": "action", "weight": 3 } ] }, { "event": "Next, the lighting in the scene gets darker, and the subtitle shows \"1 Tbsp potato powder\". Potato powder, which is off-white in color, can be seen being sprinkled into the bowl, covering the potato shreds and other seasonings. Then, the subtitle changes to \"1 Tbsp corn starch\", and another spoonful of pure white corn starch is sprinkled into the bowl, continuing to cover the previous mixture.", "visual_elements": [ { "content": "The lighting in the scene gets darker.", "type": "scene", "weight": 1 }, { "content": "The subtitle shows \"1 Tbsp potato powder\".", "type": "attribute", "weight": 2 }, { "content": "Potato powder can be seen being sprinkled into the bowl.", "type": "scene", "weight": 3 }, { "content": "The potato powder is off-white in color.", "type": "attribute", "weight": 2 }, { "content": "The potato powder covers the potato shreds and other seasonings.", "type": "attribute", "weight": 1 }, { "content": "The subtitle changes to \"1 Tbsp corn starch\".", "type": "attribute", "weight": 1 }, { "content": "Another spoonful of pure white corn starch is sprinkled into the bowl.", "type": "action", "weight": 3 }, { "content": "The corn starch continues to cover the previous mixture.", "type": "attribute", "weight": 1 } ] }, { "event": "Then, the subtitle shows \"2 Tbsp rice flour\". Then the camera switches, and the lighting in the scene changes to a warm tone, and several spoonfuls of rice flour are sprinkled into the bowl. Then, the subtitle shows \"1 Tbsp water\", and a spoonful of transparent water is poured into the bowl. Finally, a mixer appears in the scene, starting to stir the mixture inside the bowl. 
The potato shreds, powders, salt, corn starch and water gradually mix evenly, and the bowl shakes slightly with the stirring action.", "visual_elements": [ { "content": "The subtitle shows \"2 Tbsp rice flour\".", "type": "attribute", "weight": 1 }, { "content": "Then the camera switches.", "type": "camera", "weight": 3 }, { "content": "The lighting in the scene changes to a warm tone.", "type": "scene", "weight": 1 }, { "content": "Several spoonfuls of rice flour are sprinkled into the bowl.", "type": "action", "weight": 3 }, { "content": "The subtitle shows \"1 Tbsp water\".", "type": "attribute", "weight": 1 }, { "content": "A spoonful of transparent water is poured into the bowl.", "type": "action", "weight": 3 }, { "content": "A mixer appears in the scene.", "type": "scene", "weight": 3 }, { "content": "The mixer starts to stir the mixture inside the bowl.", "type": "action", "weight": 3 }, { "content": "The potato shreds, powders, salt, corn starch and water gradually mix evenly.", "type": "attribute", "weight": 2 }, { "content": "The bowl shakes slightly with the stirring action.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 27 }, { "index": "TUNA_0608", "video_path": "YouCook2/121/UaQ_PwDnIis/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 29.03, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a white bowl. Next, the camera is aimed at a large beige pot that is empty inside. Subsequently, a hand holding chopped pieces of meat appears in the frame and puts these pieces of meat into the pot. The camera shakes violently, and the edge of the pot can be seen to be black.\nThe camera continues to shake, and a hand holding a black spatula starts to stir-fry the pieces of meat in the pot, repeating the stir-frying action.\nNext, the camera switches, continues to shake and is focused on the bowl, giving a close-up of the stir-fried pieces of meat. The pieces of meat have turned golden brown, and the picture is blurry.", "events": [ { "event": "At the beginning of the video, the camera is focused on a white bowl. Next, the camera is aimed at a large beige pot that is empty inside. Subsequently, a hand holding chopped pieces of meat appears in the frame and puts these pieces of meat into the pot. 
The camera shakes violently, and the edge of the pot can be seen to be black.", "visual_elements": [ { "content": "The camera is focused on a white bowl.", "type": "camera", "weight": 3 }, { "content": "The camera is aimed at a large beige pot that is empty inside.", "type": "camera", "weight": 3 }, { "content": "A hand holding chopped pieces of meat appears in the frame.", "type": "attribute", "weight": 2 }, { "content": "This hand puts these pieces of meat into the pot.", "type": "action", "weight": 3 }, { "content": "The camera shakes violently,", "type": "camera", "weight": 3 }, { "content": "The edge of the pot can be seen to be black.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera continues to shake, and a hand holding a black spatula starts to stir-fry the pieces of meat in the pot, repeating the stir-frying action.", "visual_elements": [ { "content": "The camera continues to shake.", "type": "camera", "weight": 3 }, { "content": "A hand holding a black spatula starts to stir-fry the pieces of meat in the pot.", "type": "action", "weight": 3 }, { "content": "The hand repeats the stir-frying action.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches, continues to shake and is focused on the bowl, giving a close-up of the stir-fried pieces of meat. The pieces of meat have turned golden brown, and the picture is blurry.", "visual_elements": [ { "content": "The camera switches, continues to shake and is focused on the bowl.", "type": "camera", "weight": 3 }, { "content": "The camera gives a close-up of the stir-fried pieces of meat.", "type": "camera", "weight": 3 }, { "content": "The pieces of meat have turned golden brown.", "type": "attribute", "weight": 2 }, { "content": "The picture is blurry.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0609", "video_path": "YouCook2/121/fEN_fm6kX6k/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 20.02, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a kitchen countertop. A woman wearing a white short-sleeved top and a blue apron is standing in front of the countertop. She is wearing a necklace around her neck with a heart-shaped pendant. Her right hand is holding a long-handled spoon, and her left hand is holding a glass measuring cup with a strainer, and several white paper towels are laid on the strainer.\nThe camera zooms in to focus on her actions. She scoops out liquid from a large black pot with the long-handled spoon. The woman places the pot on a gas stove, and steam is rising from the pot. She pours the scooped liquid into the strainer, and the liquid flows through the strainer into the glass measuring cup below, and the camera moves to the right accordingly. She repeats this action several times, and the camera moves accordingly.\nThe camera moves upward. While scooping out the liquid, the woman looks at the camera and speaks. Other parts of the kitchen can be seen in the background, including a stainless steel sink and cabinets. There are several pots hanging on the wall above the sink, and some kitchen supplies and decorations are placed on the cabinets. There is a red enamel pot on the gas stove.", "events": [ { "event": "At the beginning of the video, the camera is focused on a kitchen countertop. A woman wearing a white short-sleeved top and a blue apron is standing in front of the countertop. 
She is wearing a necklace around her neck with a heart-shaped pendant. Her right hand is holding a long-handled spoon, and her left hand is holding a glass measuring cup with a strainer, and several white paper towels are laid on the strainer.", "visual_elements": [ { "content": "The camera is focused on a kitchen countertop.", "type": "camera", "weight": 3 }, { "content": "A woman wearing a white short-sleeved top and a blue apron is standing in front of the countertop.", "type": "attribute", "weight": 2 }, { "content": "This woman is wearing a necklace around her neck.", "type": "attribute", "weight": 2 }, { "content": "The necklace has a heart-shaped pendant.", "type": "attribute", "weight": 2 }, { "content": "This woman's right hand is holding a long-handled spoon.", "type": "action", "weight": 3 }, { "content": "This woman's left hand is holding a glass measuring cup with a strainer.", "type": "action", "weight": 3 }, { "content": "Several white paper towels are laid on the strainer.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera zooms in to focus on her actions. She scoops out liquid from a large black pot with the long-handled spoon. The woman places the pot on a gas stove, and steam is rising from the pot. She pours the scooped liquid into the strainer, and the liquid flows through the strainer into the glass measuring cup below, and the camera moves to the right accordingly. She repeats this action several times, and the camera moves accordingly.", "visual_elements": [ { "content": "The camera zooms in to focus on the woman's actions.", "type": "camera", "weight": 3 }, { "content": "This woman scoops out liquid from a large black pot with the long-handled spoon.", "type": "action", "weight": 3 }, { "content": "The woman places the pot on a gas stove.", "type": "attribute", "weight": 2 }, { "content": "Steam is rising from the pot.", "type": "attribute", "weight": 1 }, { "content": "This woman pours the scooped liquid into the strainer.", "type": "action", "weight": 3 }, { "content": "The liquid flows through the strainer into the glass measuring cup below.", "type": "attribute", "weight": 1 }, { "content": "The camera moves to the right accordingly.", "type": "camera", "weight": 3 }, { "content": "The woman repeats this action several times.", "type": "action", "weight": 2 }, { "content": "The camera moves accordingly.", "type": "camera", "weight": 3 } ] }, { "event": "The camera moves upward. While scooping out the liquid, the woman looks at the camera and speaks. Other parts of the kitchen can be seen in the background, including a stainless steel sink and cabinets. There are several pots hanging on the wall above the sink, and some kitchen supplies and decorations are placed on the cabinets. 
There is a red enamel pot on the gas stove.", "visual_elements": [ { "content": "The camera moves upward.", "type": "camera", "weight": 3 }, { "content": "While scooping out the liquid, the woman looks at the camera and speaks.", "type": "action", "weight": 2 }, { "content": "Other parts of the kitchen can be seen in the background, including a stainless steel sink and cabinets.", "type": "scene", "weight": 2 }, { "content": "There are several pots hanging on the wall above the sink.", "type": "attribute", "weight": 2 }, { "content": "Some kitchen supplies and decorations are placed on the cabinets.", "type": "attribute", "weight": 2 }, { "content": "There is a red enamel pot on the gas stove.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0610", "video_path": "YouCook2/121/nfO0YS2dSdw/split_6.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 11.01, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a stainless steel frying pan. There are ingredients that are boiling inside the pan. The ingredients include small pieces of potatoes and some pieces of meat. The liquid part is light yellow in color and there are some bubbles on the surface. A part of the stove can be seen in the background.\nNext, the camera remains in the same position, and the ingredients in the pan continue to boil. Then, some diced red vegetables are sprinkled into the pan, and then white milk is poured into the pan. Finally, a wooden spoon appears in the picture and starts to stir the ingredients in the pan. The wooden spoon evenly mixes the white milk, red vegetables, potatoes and pieces of meat.", "events": [ { "event": "At the beginning of the video, the camera is focused on a stainless steel frying pan. There are ingredients that are boiling inside the pan. The ingredients include small pieces of potatoes and some pieces of meat. The liquid part is light yellow in color and there are some bubbles on the surface. A part of the stove can be seen in the background.", "visual_elements": [ { "content": "The camera is focused on a stainless steel frying pan.", "type": "camera", "weight": 3 }, { "content": "There are ingredients that are boiling inside the pan.", "type": "attribute", "weight": 2 }, { "content": "The ingredients include small pieces of potatoes and some pieces of meat.", "type": "attribute", "weight": 3 }, { "content": "The liquid part is light yellow in color and there are some bubbles on the surface.", "type": "attribute", "weight": 2 }, { "content": "A part of the stove can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera remains in the same position, and the ingredients in the pan continue to boil. Then, some diced red vegetables are sprinkled into the pan, and then white milk is poured into the pan. Finally, a wooden spoon appears in the picture and starts to stir the ingredients in the pan. 
The wooden spoon evenly mixes the white milk, red vegetables, potatoes and pieces of meat.", "visual_elements": [ { "content": "The camera remains in the same position.", "type": "camera", "weight": 3 }, { "content": "The ingredients in the pan continue to boil.", "type": "attribute", "weight": 1 }, { "content": "Some diced red vegetables are sprinkled into the pan.", "type": "action", "weight": 3 }, { "content": "White milk is poured into the pan.", "type": "action", "weight": 3 }, { "content": "A wooden spoon appears in the picture.", "type": "scene", "weight": 2 }, { "content": "The wooden spoon starts to stir the ingredients in the pan.", "type": "action", "weight": 3 }, { "content": "The wooden spoon evenly mixes the white milk, red vegetables, potatoes and pieces of meat.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0611", "video_path": "YouCook2/122/-yfTO7V2d_E/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 8.01, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a kitchen stove with a stainless steel pot placed on it. There are some cut pieces of vegetables in the pot. A person wearing a light green short-sleeved shirt is standing in front of the stove. This person's right hand is holding a transparent measuring cup filled with brown liquid, and his left hand is holding a wooden spoon to stir in the pot.\nNext, this person pours the liquid in the measuring cup into the pot. After the liquid is poured, this person stops stirring, places the measuring cup aside with his right hand, and intends to pick up a can of tomatoes. During the process of pouring the liquid, some cans can be seen placed on the kitchen countertop in the background. There are obvious labels on the cans, showing that they are whole peeled tomatoes. There are also some other kitchen utensils and spice bottles on the countertop.", "events": [ { "event": "At the beginning of the video, the camera is focused on a kitchen stove with a stainless steel pot placed on it. There are some cut pieces of vegetables in the pot. A person wearing a light green short-sleeved shirt is standing in front of the stove. This person's right hand is holding a transparent measuring cup filled with brown liquid, and his left hand is holding a wooden spoon to stir in the pot.", "visual_elements": [ { "content": "The camera is focused on a kitchen stove.", "type": "camera", "weight": 3 }, { "content": "There is a stainless steel pot placed on the stove.", "type": "scene", "weight": 2 }, { "content": "There are some cut pieces of vegetables in the pot.", "type": "attribute", "weight": 2 }, { "content": "A person wearing a light green short-sleeved shirt is standing in front of the stove.", "type": "attribute", "weight": 2 }, { "content": "This person's right hand is holding a transparent measuring cup filled with brown liquid.", "type": "action", "weight": 2 }, { "content": "This person's left hand is holding a wooden spoon to stir in the pot.", "type": "action", "weight": 3 } ] }, { "event": "Next, this person pours the liquid in the measuring cup into the pot. After the liquid is poured, this person stops stirring, places the measuring cup aside with his right hand, and intends to pick up a can of tomatoes. During the process of pouring the liquid, some cans can be seen placed on the kitchen countertop in the background. 
There are obvious labels on the cans, showing that they are whole peeled tomatoes. There are also some other kitchen utensils and spice bottles on the countertop.", "visual_elements": [ { "content": "This person pours the liquid in the measuring cup into the pot.", "type": "action", "weight": 3 }, { "content": "After the liquid is poured, this person stops stirring.", "type": "attribute", "weight": 1 }, { "content": "The right hand places the measuring cup aside.", "type": "action", "weight": 2 }, { "content": "The right hand intends to pick up a can of tomatoes.", "type": "action", "weight": 2 }, { "content": "During the process of pouring the liquid, some cans can be seen placed on the kitchen countertop in the background.", "type": "scene", "weight": 2 }, { "content": "There are obvious labels on the cans.", "type": "attribute", "weight": 1 }, { "content": "The labels show that they are whole peeled tomatoes.", "type": "attribute", "weight": 1 }, { "content": "There are also some other kitchen utensils and spice bottles on the countertop.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0612", "video_path": "YouCook2/122/-yfTO7V2d_E/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 24.06, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a kitchen stove with a stainless steel pot placed on it. There are some cut pieces of vegetables in the pot, and a wooden spoon is also in the pot. In the background, the kitchen countertop can be seen, with some kitchen utensils and spice jars placed on it.\nA person wearing a light green short-sleeved shirt and khaki pants appears on the left side of the screen. His right hand picks up the wooden spoon, and his left hand pours the red liquid from a can into the pot. Then he continues to stir with the wooden spoon.\nSubsequently, this person picks up a brown bottle and sprinkles the white seasoning from the bottle into the pot.", "events": [ { "event": "At the beginning of the video, the camera is focused on a kitchen stove with a stainless steel pot placed on it. There are some cut pieces of vegetables in the pot, and a wooden spoon is also in the pot. In the background, the kitchen countertop can be seen, with some kitchen utensils and spice jars placed on it.", "visual_elements": [ { "content": "The camera is focused on a kitchen stove.", "type": "camera", "weight": 3 }, { "content": "On the stove is a stainless steel pot.", "type": "attribute", "weight": 2 }, { "content": "There are some cut pieces of vegetables in the pot.", "type": "attribute", "weight": 2 }, { "content": "A wooden spoon is also in the pot.", "type": "attribute", "weight": 2 }, { "content": "In the background, the kitchen countertop can be seen.", "type": "scene", "weight": 1 }, { "content": "There are some kitchen utensils and spice jars placed on the countertop.", "type": "scene", "weight": 1 } ] }, { "event": "A person wearing a light green short-sleeved shirt and khaki pants appears on the left side of the screen. His right hand picks up the wooden spoon, and his left hand pours the red liquid from a can into the pot. 
Then he continues to stir with the wooden spoon.", "visual_elements": [ { "content": "A person wearing a light green short-sleeved shirt and khaki pants appears on the left side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The right hand picks up the wooden spoon.", "type": "action", "weight": 3 }, { "content": "The left hand pours the red liquid from a can into the pot.", "type": "action", "weight": 3 }, { "content": "This person continues to stir with the wooden spoon.", "type": "action", "weight": 2 } ] }, { "event": "Subsequently, this person picks up a brown bottle and sprinkles the white seasoning from the bottle into the pot.", "visual_elements": [ { "content": "This person picks up a brown bottle.", "type": "action", "weight": 2 }, { "content": "This person sprinkles the white seasoning from the bottle into the pot.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0613", "video_path": "YouCook2/122/1r7N-G4kYG8/split_12.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 5.04, "resolution": { "width": 720, "height": 480 }, "caption": "At the beginning of the video, the camera is aimed at a kitchen scene. In the background, wooden cabinets, some kitchen appliances and kitchen tools can be seen. A woman wearing a red dress is standing in front of the kitchen countertop, with her hair tied in a ponytail. She is holding a purple and white cardboard box in her hand, with some words and patterns on the box. The woman opens the box.\nNext, the camera switches to a top-down view, showing that there is already some orange-red liquid in the pot, and a metal spoon is placed in the pot. The woman starts to pour the liquid from the box into the pot, and the liquid is milky white.\nAs the milky white liquid is being poured, the camera maintains the top-down view. The woman's left hand holds the spoon to stir the liquid in the pot, and the milky white and orange-red liquids in the pot are mixed together after being stirred.", "events": [ { "event": "At the beginning of the video, the camera is aimed at a kitchen scene. In the background, wooden cabinets, some kitchen appliances and kitchen tools can be seen. A woman wearing a red dress is standing in front of the kitchen countertop, with her hair tied in a ponytail. She is holding a purple and white cardboard box in her hand, with some words and patterns on the box. The woman opens the box.", "visual_elements": [ { "content": "The camera is aimed at a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "In the background, wooden cabinets, some kitchen appliances and kitchen tools can be seen.", "type": "scene", "weight": 2 }, { "content": "A woman wearing a red dress is standing in front of the kitchen countertop.", "type": "scene", "weight": 3 }, { "content": "This woman's hair is tied in a ponytail.", "type": "attribute", "weight": 2 }, { "content": "This woman is holding a purple and white cardboard box in her hand.", "type": "attribute", "weight": 2 }, { "content": "On the box are some words and patterns.", "type": "attribute", "weight": 1 }, { "content": "The woman opens the box.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera switches to a top-down view, showing that there is already some orange-red liquid in the pot, and a metal spoon is placed in the pot. 
The woman starts to pour the liquid from the box into the pot, and the liquid is milky white.", "visual_elements": [ { "content": "The camera switches to a top-down view,", "type": "camera", "weight": 3 }, { "content": "The scene shows that there is already some orange-red liquid in the pot.", "type": "scene", "weight": 3 }, { "content": "A metal spoon is placed in the pot.", "type": "attribute", "weight": 2 }, { "content": "The woman starts to pour the liquid from the box into the pot.", "type": "action", "weight": 3 }, { "content": "The liquid is milky white.", "type": "attribute", "weight": 2 } ] }, { "event": "As the milky white liquid is being poured, the camera maintains the top-down view. The woman's left hand holds the spoon to stir the liquid in the pot, and the milky white and orange-red liquids in the pot are mixed together after being stirred.", "visual_elements": [ { "content": "The camera maintains the top-down view.", "type": "camera", "weight": 3 }, { "content": "The woman's left hand holds the spoon to stir the liquid in the pot.", "type": "action", "weight": 3 }, { "content": "The milky white and orange-red liquids in the pot are mixed together after being stirred.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0614", "video_path": "YouCook2/122/1r7N-G4kYG8/split_7.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 12.05, "resolution": { "width": 720, "height": 480 }, "caption": "At the beginning of the video, a woman appears in the frame. She is wearing a red dress and standing in front of the kitchen stove. Her hair is tied in a ponytail. The background is a kitchen, with kitchen appliances and some kitchen utensils in it. The cabinets are wooden, and there is a stainless steel pot on the stove.\nThe woman's right hand is holding a transparent plastic container, which contains a white powdery substance. The text beside it shows that it is \"Salt\". The woman picks up a small white spoon with her right hand to scoop the salt and put it into the pot. Then she puts the container aside.\nSubsequently, while talking, the woman picks up the pot lid and then looks at the camera. After looking at the camera, her fingers are also gesturing continuously. Then she puts the pot lid on the pot and presses it with her hand to ensure that the lid is tightly covered.", "events": [ { "event": "At the beginning of the video, a woman appears in the frame. She is wearing a red dress and standing in front of the kitchen stove. Her hair is tied in a ponytail. The background is a kitchen, with kitchen appliances and some kitchen utensils in it. 
The cabinets are wooden, and there is a stainless steel pot on the stove.", "visual_elements": [ { "content": "A woman appears in the frame.", "type": "scene", "weight": 2 }, { "content": "This woman is wearing a red dress.", "type": "attribute", "weight": 3 }, { "content": "This woman is standing in front of the kitchen stove.", "type": "attribute", "weight": 2 }, { "content": "This woman's hair is tied in a ponytail.", "type": "attribute", "weight": 2 }, { "content": "The background is a kitchen.", "type": "scene", "weight": 2 }, { "content": "In the kitchen are appliances and some kitchen utensils.", "type": "attribute", "weight": 2 }, { "content": "The cabinets are wooden.", "type": "attribute", "weight": 2 }, { "content": "There is a stainless steel pot on the stove.", "type": "attribute", "weight": 2 } ] }, { "event": "The woman's right hand is holding a transparent plastic container, which contains a white powdery substance. The text beside it shows that it is \"Salt\". The woman picks up a small white spoon with her right hand to scoop the salt and put it into the pot. Then she puts the container aside.", "visual_elements": [ { "content": "The woman's right hand is holding a transparent plastic container.", "type": "action", "weight": 2 }, { "content": "The container contains a white powdery substance.", "type": "attribute", "weight": 2 }, { "content": "The text shows that it is \"Salt\".", "type": "attribute", "weight": 1 }, { "content": "The woman picks up a small white spoon with her right hand to scoop the salt to put it into the pot.", "type": "action", "weight": 3 }, { "content": "This woman puts the container aside.", "type": "action", "weight": 2 } ] }, { "event": "Subsequently, while talking, the woman picks up the pot lid and then looks at the camera. After looking at the camera, her fingers are also gesturing continuously. Then she puts the pot lid on the pot and presses it with her hand to ensure that the lid is tightly covered.", "visual_elements": [ { "content": "While talking, the woman picks up the pot lid and then looks at the camera.", "type": "action", "weight": 2 }, { "content": "After looking at the camera, this woman's fingers are also gesturing continuously.", "type": "action", "weight": 2 }, { "content": "This woman puts the pot lid on the pot", "type": "action", "weight": 3 }, { "content": "This woman presses the lid with her hand to ensure that it is tightly covered.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0615", "video_path": "YouCook2/122/4gPBv7le5pw/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 16.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a woman wearing a grey long-sleeved top and a red and white striped apron. She is standing in front of the kitchen countertop, on which there is a cutting board. On the cutting board, there are sliced carrots, celery, a kitchen knife and half an onion. Next to the cutting board is a light green gas stove. The kitchen cabinets and some kitchen utensils can be seen in the background.\nThe woman holds half an onion with both hands and starts to peel off the outer skin of the onion. She puts the peeled onion skins into a transparent bowl.\nThen, the camera switches to a closer angle, focusing on her hands. 
She continues to peel the onion skin until the white inner layer of the onion is completely exposed.\nThe camera switches again to a slightly farther angle, showing that she starts to cut the onion on the cutting board. She presses the onion with her left hand and holds the knife with her right hand, cutting the onion horizontally into several slices. Then she continues to cut the onion into strips with the knife.", "events": [ { "event": "At the beginning of the video, the camera is focused on a woman wearing a grey long-sleeved top and a red and white striped apron. She is standing in front of the kitchen countertop, on which there is a cutting board. On the cutting board, there are sliced carrots, celery, a kitchen knife and half an onion. Next to the cutting board is a light green gas stove. The kitchen cabinets and some kitchen utensils can be seen in the background.", "visual_elements": [ { "content": "The camera is focused on a woman wearing a grey long-sleeved top and a red and white striped apron.", "type": "camera", "weight": 3 }, { "content": "This woman is standing in front of the kitchen countertop.", "type": "attribute", "weight": 2 }, { "content": "On the countertop, there is a cutting board.", "type": "attribute", "weight": 2 }, { "content": "On the cutting board, there are sliced carrots, celery, a kitchen knife and half an onion.", "type": "attribute", "weight": 3 }, { "content": "Next to the cutting board is a light green gas stove.", "type": "attribute", "weight": 2 }, { "content": "The kitchen cabinets and some kitchen utensils can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "The woman holds half an onion with both hands and starts to peel off the outer skin of the onion. She puts the peeled onion skins into a transparent bowl.", "visual_elements": [ { "content": "The woman holds half an onion with both hands.", "type": "action", "weight": 2 }, { "content": "The woman starts to peel off the outer skin of the onion.", "type": "action", "weight": 2 }, { "content": "The woman puts the peeled onion skins into a transparent bowl.", "type": "action", "weight": 2 } ] }, { "event": "Then, the camera switches to a closer angle, focusing on her hands. She continues to peel the onion skin until the white inner layer of the onion is completely exposed.", "visual_elements": [ { "content": "The camera switches to a closer angle.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the woman's hands.", "type": "camera", "weight": 3 }, { "content": "The woman continues to peel the onion skin until the white inner layer of the onion is completely exposed.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches again to a slightly farther angle, showing that she starts to cut the onion on the cutting board. She presses the onion with her left hand and holds the knife with her right hand, cutting the onion horizontally into several slices. 
Then she continues to cut the onion into strips with the knife.", "visual_elements": [ { "content": "The camera switches again to a slightly farther angle.", "type": "camera", "weight": 3 }, { "content": "It shows that the woman starts to cut the onion on the cutting board.", "type": "attribute", "weight": 2 }, { "content": "The woman presses the onion with her left hand.", "type": "action", "weight": 2 }, { "content": "The woman holds the knife with her right hand, cutting the onion horizontally into several slices.", "type": "action", "weight": 2 }, { "content": "The woman continues to cut the onion into strips with the knife.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 17 }, { "index": "TUNA_0616", "video_path": "YouCook2/122/FfyOV00sXEY/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 17.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a kitchen scene. The background is a modern kitchen with wooden decorations on the walls. There are several shelves on the left side, on which various spice jars and decorations are placed. Some kitchen utensils and ingredients are placed on the kitchen countertop, including a ceramic pot containing kitchen utensils, a white pot, some knives and seasonings; the ingredients include onions and garlic.\nA woman is standing behind the kitchen countertop. She is wearing a traditional dress with a floral pattern and has an orange scarf draped over her shoulders. She is cooking. She is holding a transparent measuring cup with transparent liquid in her left hand; she pours the liquid into a stainless steel pot placed on the gas stove. She is holding the wooden spatula in the pot with her right hand.\nThe camera switches to a close-up shot of the inside of the pot. It can be seen that the liquid is poured into the pot and mixed with the vegetables and spices. The ingredients in the pot include sliced onions, tomatoes and some spices, such as bay leaves and black peppercorns.\nThe camera switches again to a panoramic view of the kitchen. The woman continues to stir the ingredients in the pot with the wooden spoon. She is explaining while stirring. The camera switches again to a close-up shot of the inside of the pot to show the food in the pot. The camera switches back and forth like this three times, and the soup in the pot gradually becomes thicker.", "events": [ { "event": "At the beginning of the video, the camera is focused on a kitchen scene. The background is a modern kitchen with wooden decorations on the walls. There are several shelves on the left side, on which various spice jars and decorations are placed. 
Some kitchen utensils and ingredients are placed on the kitchen countertop, including a ceramic pot containing kitchen utensils, a white pot, some knives and seasonings; the ingredients include onions and garlic.", "visual_elements": [ { "content": "The camera is focused on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "The background is a modern kitchen.", "type": "scene", "weight": 1 }, { "content": "There are wooden decorations on the walls.", "type": "attribute", "weight": 1 }, { "content": "There are several shelves on the left side.", "type": "attribute", "weight": 2 }, { "content": "Various spice jars and decorations are placed on the shelves.", "type": "attribute", "weight": 1 }, { "content": "Some kitchen utensils and ingredients are placed on the kitchen countertop.", "type": "attribute", "weight": 2 }, { "content": "The kitchen utensils and ingredients include a ceramic pot containing kitchen utensils, a white pot, some knives and seasonings.", "type": "attribute", "weight": 2 }, { "content": "The ingredients include onions and garlic.", "type": "attribute", "weight": 2 } ] }, { "event": "A woman is standing behind the kitchen countertop. She is wearing a traditional dress with a floral pattern and has an orange scarf draped over her shoulders. She is cooking. She is holding a transparent measuring cup with transparent liquid in her left hand; she pours the liquid into a stainless steel pot placed on the gas stove. She is holding the wooden spatula in the pot with her right hand.", "visual_elements": [ { "content": "A woman is standing behind the kitchen countertop.", "type": "scene", "weight": 2 }, { "content": "The woman is wearing a traditional dress with a floral pattern.", "type": "attribute", "weight": 2 }, { "content": "There is an orange scarf draped over her shoulders.", "type": "attribute", "weight": 2 }, { "content": "This woman is cooking.", "type": "action", "weight": 3 }, { "content": "This woman is holding a transparent measuring cup in her left hand.", "type": "action", "weight": 3 }, { "content": "In the measuring cup is transparent liquid.", "type": "attribute", "weight": 2 }, { "content": "The woman pours the liquid into a stainless steel pot placed on the gas stove.", "type": "action", "weight": 2 }, { "content": "The woman is holding the wooden spatula in the pot with her right hand.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to a close-up shot of the inside of the pot. It can be seen that the liquid is poured into the pot and mixed with the vegetables and spices. The ingredients in the pot include sliced onions, tomatoes and some spices, such as bay leaves and black peppercorns.", "visual_elements": [ { "content": "The camera switches to a close-up shot of the inside of the pot.", "type": "camera", "weight": 3 }, { "content": "It can be seen that the liquid is poured into the pot.", "type": "scene", "weight": 2 }, { "content": "The liquid is mixed with the vegetables and spices.", "type": "attribute", "weight": 1 }, { "content": "The ingredients in the pot include sliced onions, tomatoes and some spices.", "type": "attribute", "weight": 2 }, { "content": "The spices include bay leaves and black peppercorns.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera switches again to a panoramic view of the kitchen. The woman continues to stir the ingredients in the pot with the wooden spoon. She is explaining while stirring. 
The camera switches again to a close-up shot of the inside of the pot to show the food in the pot. The camera switches back and forth like this three times, and the soup in the pot gradually becomes thicker.", "visual_elements": [ { "content": "The camera switches again to a panoramic view of the kitchen.", "type": "camera", "weight": 3 }, { "content": "The woman continues to stir the ingredients in the pot with the wooden spoon.", "type": "action", "weight": 3 }, { "content": "The woman is explaining while stirring.", "type": "action", "weight": 3 }, { "content": "The camera switches again to a close-up shot of the inside of the pot.", "type": "camera", "weight": 3 }, { "content": "The camera shows the food in the pot.", "type": "attribute", "weight": 2 }, { "content": "The camera switches back and forth like this three times.", "type": "camera", "weight": 3 }, { "content": "The soup in the pot gradually becomes thicker.", "type": "attribute", "weight": 1 } ] } ], "n_events": 4, "n_elements": 28 }, { "index": "TUNA_0617", "video_path": "YouCook2/122/FfyOV00sXEY/split_7.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 26.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a kitchen scene. The background is a wooden wall with some decorations and storage jars on it. Various kitchen utensils and ingredients are placed on the kitchen countertop. In the center of the picture is a woman. She is wearing a traditional dress with a floral pattern and has an orange scarf draped over her shoulders.\nThe woman is standing behind the kitchen countertop. There are three gas stoves and some pots and pans in front of her. She is using a wooden spoon to stir the ingredients in the pot. Then, she picks up a blender container with her left hand and a strainer with her right hand.\nNext, the camera switches to a close-up shot, showing that she pours the liquid from the blender container into the pot through a strainer. The liquid is brownish in color, and the original milky white liquid in the pot can be seen.\nThe camera switches again to a panoramic view. The woman is speaking while gently stirring the residue in the strainer with a wooden spoon, ensuring that the liquid is completely filtered into the pot.\nThe camera switches again to a close-up shot, showing that she stirs the residue in the strainer with a wooden spoon, and the filtered orange liquid continues to flow into the pot. The camera moves upward to capture the woman's action of stirring the strainer. Then the camera zooms out and then zooms in again, always focusing on the woman's filtering action.\nFinally, the camera returns to a panoramic view. The woman continues to stir the ingredients in the pot with a wooden spoon while explaining. Then she puts down the wooden spoon.", "events": [ { "event": "At the beginning of the video, the camera is focused on a kitchen scene. The background is a wooden wall with some decorations and storage jars on it. Various kitchen utensils and ingredients are placed on the kitchen countertop. In the center of the picture is a woman. 
She is wearing a traditional dress with a floral pattern and has an orange scarf draped over her shoulders.", "visual_elements": [ { "content": "The camera is focused on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "The background is a wooden wall.", "type": "scene", "weight": 2 }, { "content": "On the wall are some decorations and storage jars.", "type": "attribute", "weight": 1 }, { "content": "Various kitchen utensils and ingredients are placed on the kitchen countertop.", "type": "attribute", "weight": 1 }, { "content": "In the center of the picture is a woman.", "type": "scene", "weight": 3 }, { "content": "The woman is wearing a traditional dress with a floral pattern.", "type": "attribute", "weight": 3 }, { "content": "There is an orange scarf draped over the woman's shoulders.", "type": "attribute", "weight": 3 } ] }, { "event": "The woman is standing behind the kitchen countertop. There are three gas stoves and some pots and pans in front of her. She is using a wooden spoon to stir the ingredients in the pot. Then, she picks up a blender container with her left hand and a strainer with her right hand.", "visual_elements": [ { "content": "The woman is standing behind the kitchen countertop.", "type": "attribute", "weight": 1 }, { "content": "There are three gas stoves and some pots and pans in front of the woman.", "type": "attribute", "weight": 2 }, { "content": "The woman is using a wooden spoon to stir the ingredients in the pot.", "type": "action", "weight": 3 }, { "content": "The woman picks up a blender container with her left hand", "type": "action", "weight": 3 }, { "content": "The woman picks up a strainer with her right hand.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to a close-up shot, showing that she pours the liquid from the blender container into the pot through a strainer. The liquid is brownish in color, and the original milky white liquid in the pot can be seen.", "visual_elements": [ { "content": "The camera switches to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "It shows that the woman pours the liquid from the blender container into the pot through a strainer.", "type": "action", "weight": 3 }, { "content": "The liquid is brownish in color.", "type": "attribute", "weight": 2 }, { "content": "The original milky white liquid in the pot can be seen.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera switches again to a panoramic view. The woman is speaking while gently stirring the residue in the strainer with a wooden spoon, ensuring that the liquid is completely filtered into the pot.", "visual_elements": [ { "content": "The camera switches again to a panoramic view.", "type": "camera", "weight": 3 }, { "content": "The woman is speaking while gently stirring the residue in the strainer with a wooden spoon.", "type": "action", "weight": 3 }, { "content": "It ensures that the liquid is completely filtered into the pot.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera switches again to a close-up shot, showing that she stirs the residue in the strainer with a wooden spoon, and the filtered orange liquid continues to flow into the pot. The camera moves upward to capture the woman's action of stirring the strainer. 
Then the camera zooms out and then zooms in again, always focusing on the woman's filtering action.", "visual_elements": [ { "content": "The camera switches again to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "The scene shows that the woman stirs the residue in the strainer with a wooden spoon.", "type": "scene", "weight": 3 }, { "content": "The filtered orange liquid continues to flow into the pot.", "type": "attribute", "weight": 2 }, { "content": "The camera moves upward to capture the woman's action of stirring the strainer.", "type": "camera", "weight": 3 }, { "content": "The camera zooms out and then zooms in again, always focusing on the woman's filtering action.", "type": "camera", "weight": 3 } ] }, { "event": "Finally, the camera returns to a panoramic view. The woman continues to stir the ingredients in the pot with a wooden spoon while explaining. Then she puts down the wooden spoon.", "visual_elements": [ { "content": "The camera returns to a panoramic view.", "type": "camera", "weight": 3 }, { "content": "The woman continues to stir the ingredients in the pot.", "type": "action", "weight": 3 }, { "content": "The woman stirs with a wooden spoon while explaining.", "type": "action", "weight": 2 }, { "content": "The woman puts down the wooden spoon.", "type": "action", "weight": 1 } ] } ], "n_events": 6, "n_elements": 28 }, { "index": "TUNA_0618", "video_path": "YouCook2/122/G_oxCFd1krc/split_2.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 23.06, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a wooden cutting board. There is a left hand wearing a silver ring in the picture, and the right hand is holding a knife. The camera moves to the left. The right hand smashes the garlic with the back of the knife while the left hand presses on the blade of the knife. This action is repeated twice.\nThe camera switches to a medium shot, showing a woman wearing a white short-sleeved top. She is standing in front of the kitchen countertop and continues to handle the onions and garlic. The kitchen equipment and some storage jars can be seen in the background. The environment is clean and bright.\nThe camera switches again to a close-up shot. The hands continue to mix the sliced onions and garlic together. Then, the camera switches to a tray filled with halved tomatoes. The hands evenly sprinkle the sliced onions and garlic on the tomatoes.\nThe camera returns to a medium shot. The woman continues to be busy in front of the countertop. She puts the remaining onions and garlic into the tray. The camera switches again to a close-up shot, showing the mixed state of the tomatoes, onions and garlic in the tray. The hands gently turn over the contents in the tray so that the onions and garlic are more evenly distributed on the tomatoes. The camera switches between medium and close-up shots as the woman repeats this action once more. Finally, the camera focuses on the ingredients in the tray.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wooden cutting board. There is a left hand wearing a silver ring in the picture, and the right hand is holding a knife. The camera moves to the left. The right hand smashes the garlic with the back of the knife while the left hand presses on the blade of the knife. 
This action is repeated twice.", "visual_elements": [ { "content": "The camera is focused on a wooden cutting board.", "type": "camera", "weight": 3 }, { "content": "There is a left hand wearing a silver ring in the picture.", "type": "attribute", "weight": 2 }, { "content": "The person's right hand is holding a knife.", "type": "attribute", "weight": 2 }, { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The right hand smashes the garlic with the back of the knife while the left hand presses on the blade of the knife.", "type": "action", "weight": 3 }, { "content": "This action is repeated twice.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to a medium shot, showing a woman wearing a white short-sleeved top. She is standing in front of the kitchen countertop and continues to handle the onions and garlic. The kitchen equipment and some storage jars can be seen in the background. The environment is clean and bright.", "visual_elements": [ { "content": "The camera switches to a medium shot.", "type": "camera", "weight": 3 }, { "content": "It shows a woman wearing a white short-sleeved top.", "type": "attribute", "weight": 2 }, { "content": "The woman is standing in front of the kitchen countertop.", "type": "attribute", "weight": 2 }, { "content": "The woman continues to handle the onions and garlic.", "type": "action", "weight": 3 }, { "content": "The kitchen equipment and some storage jars can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "The environment is clean and bright.", "type": "scene", "weight": 1 } ] }, { "event": "The camera switches again to a close-up shot. The hands continue to mix the sliced onions and garlic together. Then, the camera switches to a tray filled with halved tomatoes. The hands evenly sprinkle the sliced onions and garlic on the tomatoes.", "visual_elements": [ { "content": "The camera switches again to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "The hands continue to mix the sliced onions and garlic together.", "type": "action", "weight": 3 }, { "content": "The camera switches to a tray.", "type": "camera", "weight": 3 }, { "content": "The tray is filled with halved tomatoes.", "type": "attribute", "weight": 3 }, { "content": "The hands evenly sprinkle the sliced onions and garlic on the tomatoes.", "type": "action", "weight": 2 } ] }, { "event": "The camera returns to a medium shot. The woman continues to be busy in front of the countertop. She puts the remaining onions and garlic into the tray. The camera switches again to a close-up shot, showing the mixed state of the tomatoes, onions and garlic in the tray. The hands gently turn over the contents in the tray so that the onions and garlic are more evenly distributed on the tomatoes. The camera switches between medium and close-up shots as the woman repeats this action once more. 
Finally, the camera focuses on the ingredients in the tray.", "visual_elements": [ { "content": "The camera returns to a medium shot.", "type": "camera", "weight": 3 }, { "content": "The woman continues to be busy in front of the countertop.", "type": "action", "weight": 3 }, { "content": "The woman puts the remaining onions and garlic into the tray.", "type": "action", "weight": 3 }, { "content": "The camera switches again to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "It shows the mixed state of the tomatoes, onions and garlic in the tray.", "type": "scene", "weight": 2 }, { "content": "The hands gently turn over the contents in the tray so that the onions and garlic are more evenly distributed on the tomatoes.", "type": "action", "weight": 2 }, { "content": "The camera switches between medium and close-up shots.", "type": "camera", "weight": 3 }, { "content": "The woman repeats this action once more.", "type": "action", "weight": 2 }, { "content": "Finally, the camera focuses on the ingredients in the tray.", "type": "camera", "weight": 3 } ] } ], "n_events": 4, "n_elements": 26 }, { "index": "TUNA_0619", "video_path": "YouCook2/122/Ghefa-Q18Tg/split_10.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 20.02, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a large red pot filled with orange-red liquid. The camera moves slightly to the right, and a person wearing a light blue top is standing beside the pot, holding a transparent measuring cup filled with white liquid. She slowly pours the white liquid from the measuring cup into the pot, and the liquid mixes with the orange-red liquid inside the pot.\nThen, the camera zooms out, and this person starts to stir the liquid in the pot with a wooden spoon. The camera then moves up significantly, and the picture shows a woman wearing a light blue top. She is stirring the liquid in the pot with a wooden spoon in her right hand, while making gestures with her left hand and talking. The kitchen cabinets, stoves and some pots hanging on the wall can be seen in the background.\nThen, the camera moves down and to the left while zooming in. This woman keeps stirring the liquid in the pot with the wooden spoon to evenly mix the orange-red liquid and the white liquid.\nThe camera switches again, and the picture shows the woman standing beside the pot, continuing to stir the liquid in the pot. There is a silver spoon on the tabletop beside the pot.", "events": [ { "event": "At the beginning of the video, the camera is focused on a large red pot filled with orange-red liquid. The camera moves slightly to the right, and a person wearing a light blue top is standing beside the pot, holding a transparent measuring cup filled with white liquid. 
She slowly pours the white liquid from the measuring cup into the pot, and the liquid mixes with the orange-red liquid inside the pot.", "visual_elements": [ { "content": "The camera is focused on a large red pot.", "type": "camera", "weight": 3 }, { "content": "The pot is filled with orange-red liquid.", "type": "attribute", "weight": 3 }, { "content": "The camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "A person wearing a light blue top is standing beside the pot.", "type": "scene", "weight": 2 }, { "content": "This person holds a transparent measuring cup.", "type": "attribute", "weight": 2 }, { "content": "The cup is filled with white liquid.", "type": "attribute", "weight": 2 }, { "content": "The woman slowly pours the white liquid from the measuring cup into the pot.", "type": "action", "weight": 3 }, { "content": "The liquid mixes with the orange-red liquid inside the pot.", "type": "attribute", "weight": 1 } ] }, { "event": "Then, the camera zooms out, and this person starts to stir the liquid in the pot with a wooden spoon. The camera then moves up significantly, and the picture shows a woman wearing a light blue top. She is stirring the liquid in the pot with a wooden spoon in her right hand, while making gestures with her left hand and talking. The kitchen cabinets, stoves and some pots hanging on the wall can be seen in the background.", "visual_elements": [ { "content": "The camera zooms out.", "type": "camera", "weight": 3 }, { "content": "This person starts to stir the liquid in the pot with a wooden spoon.", "type": "action", "weight": 2 }, { "content": "The camera then moves up significantly.", "type": "camera", "weight": 3 }, { "content": "The picture shows a woman wearing a light blue top.", "type": "scene", "weight": 2 }, { "content": "The woman is stirring the liquid in the pot with a wooden spoon in her right hand.", "type": "action", "weight": 2 }, { "content": "The woman is making gestures with her left hand and talking.", "type": "action", "weight": 3 }, { "content": "The kitchen cabinets, stoves and some pots hanging on the wall can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves down and to the left while zooming in. This woman keeps stirring the liquid in the pot with the wooden spoon to evenly mix the orange-red liquid and the white liquid.", "visual_elements": [ { "content": "The camera moves down and to the left while zooming in.", "type": "camera", "weight": 3 }, { "content": "This woman keeps stirring the liquid in the pot with the wooden spoon.", "type": "action", "weight": 3 }, { "content": "Stirring mixes the orange-red liquid and the white liquid evenly.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera switches again, and the picture shows the woman standing beside the pot, continuing to stir the liquid in the pot. 
There is a silver spoon on the tabletop beside the pot.", "visual_elements": [ { "content": "The camera switches again.", "type": "camera", "weight": 3 }, { "content": "The picture shows the woman standing beside the pot, continuing to stir the liquid in the pot.", "type": "scene", "weight": 3 }, { "content": "There is a silver spoon on the tabletop beside the pot.", "type": "attribute", "weight": 1 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0620", "video_path": "YouCook2/122/Ghefa-Q18Tg/split_4.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 10.04, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a woman. She is wearing a light blue short-sleeved top and a white vest underneath, standing in front of the kitchen countertop. Her hair is tied into a simple bun, and she is wearing a pair of small earrings. There is a red casserole on the countertop in front of her, and there are some orange-red liquids in the casserole.\nThe camera moves down, and the woman takes some green leaves with both hands and puts them into the red casserole. The kitchen cabinets, microwave ovens, pots hanging on the wall and the sink can be seen in the background. There are also some kitchen utensils and condiments placed on the countertop.\nNext, the camera zooms in slightly. The woman picks up a wooden spoon and stirs in the pot to make the green leaves completely soaked in the liquid.\nThen, the camera shakes slightly and moves up. The woman looks down into the pot, talking while stirring.", "events": [ { "event": "At the beginning of the video, the camera is focused on a woman. She is wearing a light blue short-sleeved top and a white vest underneath, standing in front of the kitchen countertop. Her hair is tied into a simple bun, and she is wearing a pair of small earrings. There is a red casserole on the countertop in front of her, and there are some orange-red liquids in the casserole.", "visual_elements": [ { "content": "The camera is focused on a woman.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a light blue short-sleeved top and a white vest underneath.", "type": "attribute", "weight": 3 }, { "content": "The woman is standing in front of the kitchen countertop.", "type": "attribute", "weight": 2 }, { "content": "The woman's hair is tied into a simple bun.", "type": "attribute", "weight": 3 }, { "content": "The woman is wearing a pair of small earrings.", "type": "attribute", "weight": 2 }, { "content": "There is a red casserole on the countertop in front of the woman.", "type": "attribute", "weight": 3 }, { "content": "There are some orange-red liquids in the casserole.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera moves down, and the woman takes some green leaves with both hands and puts them into the red casserole. The kitchen cabinets, microwave ovens, pots hanging on the wall and the sink can be seen in the background. 
There are also some kitchen utensils and condiments placed on the countertop.", "visual_elements": [ { "content": "The camera moves down.", "type": "camera", "weight": 3 }, { "content": "The woman takes some green leaves with both hands.", "type": "action", "weight": 2 }, { "content": "The woman puts leaves into the red casserole.", "type": "action", "weight": 3 }, { "content": "The kitchen cabinets, microwave ovens, pots hanging on the wall and the sink can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "There are also some kitchen utensils and condiments placed on the countertop.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera zooms in slightly. The woman picks up a wooden spoon and stirs in the pot to make the green leaves completely soaked in the liquid.", "visual_elements": [ { "content": "The camera zooms in slightly.", "type": "camera", "weight": 3 }, { "content": "The woman picks up a wooden spoon and stirs in the pot to make the green leaves completely soaked in the liquid.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera shakes slightly and moves up. The woman looks down into the pot, talking while stirring.", "visual_elements": [ { "content": "The camera shakes slightly and moves up.", "type": "camera", "weight": 3 }, { "content": "The woman looks down into the pot, talking while stirring.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 16 }, { "index": "TUNA_0621", "video_path": "YouCook2/122/Ghefa-Q18Tg/split_7.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 8.04, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a woman. She is wearing a light blue short-sleeved top and a white apron, standing in front of the kitchen countertop. She is holding a white hand-held mixer in her right hand, and making gestures with her left hand while talking.\nThe camera moves down. There is red liquid in the red casserole on the countertop. The kitchen cabinets, microwave ovens, stoves and some pots hanging on the wall can be seen in the background. There are also some condiments and tools placed on the adjacent countertop.\nThe camera zooms in. The woman inserts the mixer into the red liquid in the casserole and starts to stir. The stirring head of the mixer moves up and down in the liquid, and ripples and bubbles appear on the surface of the liquid.\nThe camera switches the perspective and focuses on the casserole and the mixer. It can be clearly seen that the texture of the red liquid becomes more even and fine. The woman continues to stir with the mixer, and the red texture in the pot is steaming.", "events": [ { "event": "At the beginning of the video, the camera is focused on a woman. She is wearing a light blue short-sleeved top and a white apron, standing in front of the kitchen countertop. 
She is holding a white hand-held mixer in her right hand, and making gestures with her left hand while talking.", "visual_elements": [ { "content": "The camera is focused on a woman.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a light blue short-sleeved top and a white apron.", "type": "attribute", "weight": 3 }, { "content": "The woman is standing in front of the kitchen countertop.", "type": "action", "weight": 2 }, { "content": "The woman is holding a white hand-held mixer in her right hand.", "type": "action", "weight": 3 }, { "content": "The woman is making gestures with her left hand while talking.", "type": "action", "weight": 2 } ] }, { "event": "The camera moves down. There is red liquid in the red casserole on the countertop. The kitchen cabinets, microwave ovens, stoves and some pots hanging on the wall can be seen in the background. There are also some condiments and tools placed on the adjacent countertop.", "visual_elements": [ { "content": "The camera moves down.", "type": "camera", "weight": 3 }, { "content": "There is red liquid in the red casserole on the countertop.", "type": "attribute", "weight": 3 }, { "content": "The kitchen cabinets, microwave ovens, stoves and some pots hanging on the wall can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "There are also some condiments and tools placed on the adjacent countertop.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera zooms in. The woman inserts the mixer into the red liquid in the casserole and starts to stir. The stirring head of the mixer moves up and down in the liquid, and ripples and bubbles appear on the surface of the liquid.", "visual_elements": [ { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The woman inserts the mixer into the red liquid in the casserole.", "type": "action", "weight": 3 }, { "content": "The woman starts to stir.", "type": "action", "weight": 3 }, { "content": "The stirring head of the mixer moves up and down in the liquid.", "type": "action", "weight": 3 }, { "content": "The ripples and bubbles appear on the surface of the liquid.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera switches the perspective and focuses on the casserole and the mixer. It can be clearly seen that the texture of the red liquid becomes more even and fine. The woman continues to stir with the mixer, and the red texture in the pot is steaming.", "visual_elements": [ { "content": "The camera switches the perspective.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the casserole and the mixer.", "type": "camera", "weight": 3 }, { "content": "It can be clearly seen that the texture of the red liquid becomes more even and fine.", "type": "attribute", "weight": 1 }, { "content": "The woman continues to stir with the mixer.", "type": "action", "weight": 3 }, { "content": "The red texture in the pot is steaming.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0622", "video_path": "YouCook2/122/LV943DshjOc/split_5.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 25.03, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a kitchen countertop, on which various ingredients and kitchen utensils are placed. A person wearing orange clothes is standing in front of the countertop. 
There are silver bracelets on this person's arms. This person is holding a stainless steel pot with a spoon. There is red liquid in the pot. There is a white strainer and several transparent bowls beside the pot. According to the subtitle \"We can also cut the tomato and boil it in the same way.\", the red liquid is tomato sauce.\nNext, this person pours the steaming tomato juice from the pot into the white strainer. Then, the right hand holds the strainer high, and the left hand holds a large spoon to stir the tomato sauce in the strainer to make it pass through the strainer faster. The subtitle \"It can be prepared by any one of the way.\" appears in the middle of the bottom of the video.\nThe camera switches. The picture shows that this person has completely filtered out the tomato sauce. The subtitle \"Press the tomatoes to take out the juice.\" appears in the video. Then, this person places the strainer and the large spoon in two transparent bowls on the edge of the countertop. Then, this person pours the tomato sauce from the white bowl back into the stainless steel pot. Then, this person puts the empty bowl aside and picks up the pot on the stove, preparing for the next operation.", "events": [ { "event": "At the beginning of the video, the camera is focused on a kitchen countertop, on which various ingredients and kitchen utensils are placed. A person wearing orange clothes is standing in front of the countertop. There are silver bracelets on this person's arms. This person is holding a stainless steel pot with a spoon. There is red liquid in the pot. There is a white strainer and several transparent bowls beside the pot. According to the subtitle \"We can also cut the tomato and boil it in the same way.\", the red liquid is tomato sauce.", "visual_elements": [ { "content": "The camera is focused on a kitchen countertop.", "type": "camera", "weight": 3 }, { "content": "On the countertop various ingredients and kitchen utensils are placed.", "type": "attribute", "weight": 1 }, { "content": "A person wearing orange clothes is standing in front of the countertop.", "type": "attribute", "weight": 2 }, { "content": "There are silver bracelets on this person's arms.", "type": "attribute", "weight": 1 }, { "content": "This person is holding a stainless steel pot with a spoon.", "type": "action", "weight": 3 }, { "content": "There is red liquid in the pot.", "type": "attribute", "weight": 2 }, { "content": "There is a white strainer and several transparent bowls beside the pot.", "type": "attribute", "weight": 2 }, { "content": "According to the subtitle \"We can also cut the tomato and boil it in the same way.\", the red liquid is tomato sauce.", "type": "attribute", "weight": 1 } ] }, { "event": "Next, this person pours the steaming tomato juice from the pot into the white strainer. Then, the right hand holds the strainer high, and the left hand holds a large spoon to stir the tomato sauce in the strainer to make it pass through the strainer faster. 
The subtitle \"It can be prepared by any one of the way.\" appears in the middle of the bottom of the video.", "visual_elements": [ { "content": "This person pours the steaming tomato juice from the pot into the white strainer.", "type": "action", "weight": 3 }, { "content": "The right hand holds the strainer high.", "type": "action", "weight": 3 }, { "content": "The left hand holds a large spoon to stir the tomato sauce in the strainer to make it pass through the strainer faster.", "type": "action", "weight": 3 }, { "content": "The subtitle \"It can be prepared by any one of the way.\" appears in the middle of the bottom of the video.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera switches. The picture shows that this person has completely filtered out the tomato sauce. The subtitle \"Press the tomatoes to take out the juice.\" appears in the video. Then, this person places the strainer and the large spoon in two transparent bowls on the edge of the countertop. Then, this person pours the tomato sauce from the white bowl back into the stainless steel pot. Then, this person puts the empty bowl aside and picks up the pot on the stove, preparing for the next operation.", "visual_elements": [ { "content": "The camera switches.", "type": "camera", "weight": 3 }, { "content": "The picture shows that this person has completely filtered out the tomato sauce.", "type": "attribute", "weight": 1 }, { "content": "The subtitle \"Press the tomatoes to take out the juice.\" appears in the video.", "type": "attribute", "weight": 1 }, { "content": "This person places the strainer and the large spoon in two transparent bowls on the edge of the countertop.", "type": "action", "weight": 3 }, { "content": "This person pours the tomato sauce from the white bowl back into the stainless steel pot.", "type": "action", "weight": 3 }, { "content": "This person puts the empty bowl aside.", "type": "action", "weight": 2 }, { "content": "This person picks up the pot on the stove.", "type": "action", "weight": 1 }, { "content": "This person is preparing for the next operation.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0623", "video_path": "YouCook2/122/SsafTopywFo/split_5.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 23.02, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on an elderly man. He is wearing a black T-shirt with red and white words and patterns printed on it. He is standing beside the kitchen countertop. The kitchen cabinets, microwave ovens and other kitchen utensils can be seen in the background.\nNext, the camera switches the perspective and moves down to give a close-up of this man's hands handling a plate of red tomatoes. He crushes the tomatoes on the plate one by one with both hands. There is a knife and a piece of paper towel beside the plate.\nThen, the camera switches to a pot. There are cut potato pieces, onions and a green pepper in the pot. The camera zooms out. The pot is placed on the stove. Then, he pours the crushed tomatoes from the plate into the pot. The camera zooms in. The tomato juice and crushed tomato pieces slowly flow into the pot, covering the potatoes and onions.\nFinally, the camera zooms out again and moves to the left. 
He pours the last bit of tomatoes from the plate into the pot and scrapes the plate clean with his hand.", "events": [ { "event": "At the beginning of the video, the camera is focused on an elderly man. He is wearing a black T-shirt with red and white words and patterns printed on it. He is standing beside the kitchen countertop. The kitchen cabinets, microwave ovens and other kitchen utensils can be seen in the background.", "visual_elements": [ { "content": "The camera is focused on an elderly man.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a black T-shirt.", "type": "attribute", "weight": 2 }, { "content": "Red and white words and patterns are printed on the T-shirt.", "type": "attribute", "weight": 1 }, { "content": "The man is standing beside the kitchen countertop.", "type": "action", "weight": 2 }, { "content": "The kitchen cabinets, microwave ovens and other kitchen utensils can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera switches the perspective and moves down to give a close-up of this man's hands handling a plate of red tomatoes. He crushes the tomatoes on the plate one by one with both hands. There is a knife and a piece of paper towel beside the plate.", "visual_elements": [ { "content": "The camera switches the perspective and moves down.", "type": "camera", "weight": 3 }, { "content": "The camera gives a close-up of this man's hands handling a plate of red tomatoes.", "type": "camera", "weight": 3 }, { "content": "The man crushes the tomatoes on the plate one by one with both hands.", "type": "action", "weight": 3 }, { "content": "There is a knife and a piece of paper towel beside the plate.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera switches to a pot. There are cut potato pieces, onions and a green pepper in the pot. The camera zooms out. The pot is placed on the stove. Then, he pours the crushed tomatoes from the plate into the pot. The camera zooms in. The tomato juice and crushed tomato pieces slowly flow into the pot, covering the potatoes and onions.", "visual_elements": [ { "content": "The camera switches to a pot.", "type": "camera", "weight": 3 }, { "content": "There are cut potato pieces, onions and a green pepper in the pot.", "type": "attribute", "weight": 2 }, { "content": "The camera zooms out.", "type": "camera", "weight": 3 }, { "content": "The pot is placed on the stove.", "type": "attribute", "weight": 1 }, { "content": "This man pours the crushed tomatoes from the plate into the pot.", "type": "action", "weight": 3 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The tomato juice and crushed tomato pieces slowly flow into the pot, covering the potatoes and onions.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the camera zooms out again and moves to the left. 
He pours the last bit of tomatoes from the plate into the pot and scrapes the plate clean with his hand.", "visual_elements": [ { "content": "The camera zooms out again and moves to the left.", "type": "camera", "weight": 3 }, { "content": "This man pours the last bit of tomatoes from the plate into the pot.", "type": "action", "weight": 2 }, { "content": "This man scrapes the plate clean with his hand.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0624", "video_path": "YouCook2/122/danq5zt1zlQ/split_5.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 21.02, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a pot filled with red liquid. The liquid is steaming in the pot. Then, the camera captures a hand holding a white container, and white liquid is poured out of the container and slowly poured into the red liquid.\nNext, the camera captures a hand holding a wooden spoon, which starts to stir the liquid in the pot. The red and white liquids gradually mix to form an even orange liquid.\nThen, the camera captures that there are some green herbs stuck on the wooden spoon. A piece of butter is put into the pot along the wooden spoon, and the wooden spoon continues to stir.", "events": [ { "event": "At the beginning of the video, the camera is focused on a pot filled with red liquid. The liquid is steaming in the pot. Then, the camera captures a hand holding a white container, and white liquid is poured out of the container and slowly poured into the red liquid.", "visual_elements": [ { "content": "The camera is focused on a pot filled with red liquid.", "type": "camera", "weight": 3 }, { "content": "The liquid is steaming in the pot.", "type": "attribute", "weight": 2 }, { "content": "The camera captures a hand holding a white container.", "type": "camera", "weight": 3 }, { "content": "White liquid is poured out of the container.", "type": "action", "weight": 3 }, { "content": "The white liquid is slowly poured into the red liquid.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera captures a hand holding a wooden spoon, which starts to stir the liquid in the pot. The red and white liquids gradually mix to form an even orange liquid.", "visual_elements": [ { "content": "The camera captures a hand holding a wooden spoon.", "type": "camera", "weight": 3 }, { "content": "The wooden spoon starts to stir the liquid in the pot.", "type": "action", "weight": 3 }, { "content": "The red and white liquids gradually mix.", "type": "attribute", "weight": 2 }, { "content": "The mixture forms an even orange liquid.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera captures that there are some green herbs stuck on the wooden spoon. 
A piece of butter is put into the pot along the wooden spoon, and the wooden spoon continues to stir.", "visual_elements": [ { "content": "The camera captures that there are some green herbs stuck on the wooden spoon.", "type": "camera", "weight": 3 }, { "content": "A piece of butter is put into the pot along the wooden spoon.", "type": "action", "weight": 3 }, { "content": "The wooden spoon continues to stir.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0625", "video_path": "YouCook2/124/dsI9QVhmsGg/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 8.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a barbecue grill with a metal grill net placed on it. A hand wearing black gloves is holding a sausage wrapped with bacon and places it on the grill net. In the background, there are burning charcoal briquettes underneath the barbecue grill, emitting a faint red glow.\nNext, the hand continues neatly arranging another sausage on the grill net.\nThen, the camera switches to a middle-aged man. In the frame, the man is standing beside the barbecue grill, wearing a black T-shirt with the words \"BBQFOOD4U\" printed on it. The man is standing in front of the camera, with some trees and another barbecue equipment in the background. Smoke is rising from the barbecue equipment. The man is making gestures and talking at the same time.", "events": [ { "event": "At the beginning of the video, the camera is focused on a barbecue grill with a metal grill net placed on it. A hand wearing black gloves is holding a sausage wrapped with bacon and places it on the grill net. In the background, there are burning charcoal briquettes underneath the barbecue grill, emitting a faint red glow.", "visual_elements": [ { "content": "The camera is focused on a barbecue grill.", "type": "camera", "weight": 3 }, { "content": "There is a metal grill net placed on the grill.", "type": "attribute", "weight": 2 }, { "content": "A hand wearing black gloves is holding a sausage wrapped with bacon.", "type": "action", "weight": 3 }, { "content": "The hand places the sausage on the grill net.", "type": "action", "weight": 3 }, { "content": "In the background, there are burning charcoal briquettes underneath the barbecue grill.", "type": "scene", "weight": 1 }, { "content": "The burning charcoal emits a faint red glow.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the hand continues neatly arranging another sausage on the grill net.", "visual_elements": [ { "content": "The hand continues neatly arranging another sausage on the grill net.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera switches to a middle-aged man. In the frame, the man is standing beside the barbecue grill, wearing a black T-shirt with the words \"BBQFOOD4U\" printed on it. The man is standing in front of the camera, with some trees and another barbecue equipment in the background. Smoke is rising from the barbecue equipment. 
The man is making gestures and talking at the same time.", "visual_elements": [ { "content": "The camera switches to a middle-aged man.", "type": "camera", "weight": 3 }, { "content": "In the frame, the man is standing beside the barbecue grill.", "type": "scene", "weight": 3 }, { "content": "This man is wearing a black T-shirt.", "type": "attribute", "weight": 2 }, { "content": "The words \"BBQFOOD4U\" are printed on the T-shirt.", "type": "attribute", "weight": 1 }, { "content": "The man is standing in front of the camera.", "type": "attribute", "weight": 2 }, { "content": "There are some trees and another barbecue equipment in the background.", "type": "scene", "weight": 2 }, { "content": "Smoke is rising from the barbecue equipment.", "type": "attribute", "weight": 1 }, { "content": "The man is making gestures and talking at the same time.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0626", "video_path": "YouCook2/124/qOB709GWcE8/split_3.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic", "domain": "Cooking", "duration": 16.05, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a wooden table, on which various ingredients and seasonings are placed. There are three hot dog buns, several grilled sausages, a bottle of Heinz yellow mustard sauce, a bottle of celery salt, a cup of beverage, as well as some sliced tomatoes, onions, green peppers and pickles on the table.\nA man wearing a dark T-shirt is standing behind the table, and his hands appear in the frame. The camera zooms in. The man picks up a grilled sausage from the plate with his right hand and puts it into one of the hot dog buns. Then, he picks up another sausage and puts it into the second hot dog bun. Then, he repeats this action and puts the third sausage into the last hot dog bun.\nAfter the man has placed the sausages, he picks up the bottle of Heinz yellow mustard sauce with his right hand and opens the cap with his left hand. Then, he squeezes the mustard sauce onto the third hot dog. It can be seen in the frame that the yellow mustard sauce evenly covers the sausage. Then, he continues to squeeze the mustard sauce onto the second hot dog.\nThe background is a window with blinds, and the light from outside the window shines through the blinds. A white door is on the left side of the table, and the doorknob is golden.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wooden table, on which various ingredients and seasonings are placed. There are three hot dog buns, several grilled sausages, a bottle of Heinz yellow mustard sauce, a bottle of celery salt, a cup of beverage, as well as some sliced tomatoes, onions, green peppers and pickles on the table.", "visual_elements": [ { "content": "The camera is focused on a wooden table.", "type": "camera", "weight": 3 }, { "content": "Various ingredients and seasonings are placed on the table.", "type": "attribute", "weight": 2 }, { "content": "There are three hot dog buns, several grilled sausages, a bottle of Heinz yellow mustard sauce, a bottle of celery salt, a cup of beverage.", "type": "attribute", "weight": 1 }, { "content": "There are also some sliced tomatoes, onions, green peppers and pickles on the table.", "type": "attribute", "weight": 1 } ] }, { "event": "A man wearing a dark T-shirt is standing behind the table, and his hands appear in the frame. The camera zooms in. 
The man picks up a grilled sausage from the plate with his right hand and puts it into one of the hot dog buns. Then, he picks up another sausage and puts it into the second hot dog bun. Then, he repeats this action and puts the third sausage into the last hot dog bun.", "visual_elements": [ { "content": "A man wearing a dark T-shirt is standing behind the table.", "type": "scene", "weight": 2 }, { "content": "The man's hands appear in the frame.", "type": "scene", "weight": 2 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The man picks up a grilled sausage from the plate with his right hand.", "type": "action", "weight": 3 }, { "content": "The man puts the sausage into one of the hot dog buns.", "type": "action", "weight": 3 }, { "content": "The man picks up another sausage and puts it into the second hot dog bun.", "type": "action", "weight": 3 }, { "content": "The man repeats this action.", "type": "action", "weight": 3 }, { "content": "The man puts the third sausage into the last hot dog bun.", "type": "action", "weight": 3 } ] }, { "event": "After the man has placed the sausages, he picks up the bottle of Heinz yellow mustard sauce with his right hand and opens the cap with his left hand. Then, he squeezes the mustard sauce onto the third hot dog. It can be seen in the frame that the yellow mustard sauce evenly covers the sausage. Then, he continues to squeeze the mustard sauce onto the second hot dog.", "visual_elements": [ { "content": "The man has placed the sausages.", "type": "action", "weight": 3 }, { "content": "The man picks up the bottle of Heinz yellow mustard sauce with his right hand.", "type": "action", "weight": 3 }, { "content": "The man opens the cap with his left hand.", "type": "action", "weight": 3 }, { "content": "The man squeezes the mustard sauce onto the third hot dog.", "type": "action", "weight": 3 }, { "content": "It can be seen in the frame that the yellow mustard sauce evenly covers the sausage.", "type": "scene", "weight": 2 }, { "content": "The man continues to squeeze the mustard sauce onto the second hot dog.", "type": "action", "weight": 3 } ] }, { "event": "The background is a window with blinds, and the light from outside the window shines through the blinds. A white door is on the left side of the table, and the doorknob is golden.", "visual_elements": [ { "content": "The background is a window with blinds.", "type": "scene", "weight": 2 }, { "content": "The light from outside the window shines through the blinds.", "type": "attribute", "weight": 1 }, { "content": "A white door is on the left side of the table.", "type": "attribute", "weight": 1 }, { "content": "The doorknob is golden.", "type": "attribute", "weight": 1 } ] } ], "n_events": 4, "n_elements": 22 }, { "index": "TUNA_0627", "video_path": "YouCook2/126/WWwcQ6kiKtA/split_1.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 21.02, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a woman who is standing in front of the kitchen countertop. She is wearing a black-and-white short-sleeved top and light-colored pants. The background is a modern kitchen with wooden cabinets and a marble countertop. There is a white blender on the left side, and a sink and some kitchen utensils near the window on the right side.\nThe camera moves down. 
The woman is holding a transparent plastic zipper bag on the countertop and starts to add pieces of meat into it. She first takes the first piece of meat from the plate and puts it into the plastic bag. Then the camera zooms in. The woman picks up several more pieces of meat from a plate filled with pieces of meat and puts them into the bag one by one. The camera zooms in to focus on her hands and the bag. A measuring cup, a bowl and a mixer in the bowl can be seen on the countertop.\nThen, the camera switches to another perspective. The woman picks up a glass bowl filled with yellow liquid and green herbs and pours it into the plastic bag with the pieces of meat. After pouring the liquid, the woman puts the glass bowl back on the countertop and then gently adjusts the plastic bag with her hands.", "events": [ { "event": "At the beginning of the video, the camera is focused on a woman who is standing in front of the kitchen countertop. She is wearing a black-and-white short-sleeved top and light-colored pants. The background is a modern kitchen with wooden cabinets and a marble countertop. There is a white blender on the left side, and a sink and some kitchen utensils near the window on the right side.", "visual_elements": [ { "content": "The camera is focused on a woman.", "type": "camera", "weight": 3 }, { "content": "The woman is standing in front of the kitchen countertop.", "type": "attribute", "weight": 2 }, { "content": "The woman is wearing a black-and-white short-sleeved top and light-colored pants.", "type": "attribute", "weight": 2 }, { "content": "The background is a modern kitchen", "type": "scene", "weight": 2 }, { "content": "There are wooden cabinets and a marble countertop in the kitchen.", "type": "attribute", "weight": 1 }, { "content": "There is a white blender on the left side.", "type": "attribute", "weight": 1 }, { "content": "There is a sink and some kitchen utensils near the window on the right side.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves down. The woman is holding a transparent plastic zipper bag on the countertop and starts to add pieces of meat into it. She first takes the first piece of meat from the plate and puts it into the plastic bag. Then the camera zooms in. The woman picks up several more pieces of meat from a plate filled with pieces of meat and puts them into the bag one by one. The camera zooms in to focus on her hands and the bag. A measuring cup, a bowl and a mixer in the bowl can be seen on the countertop.", "visual_elements": [ { "content": "The camera moves down.", "type": "camera", "weight": 3 }, { "content": "The woman is holding a transparent plastic zipper bag on the countertop.", "type": "action", "weight": 2 }, { "content": "The woman starts to add pieces of meat into it.", "type": "action", "weight": 2 }, { "content": "The woman first takes the first piece of meat from the plate and puts it into the plastic bag.", "type": "action", "weight": 3 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The woman picks up several more pieces of meat from a plate filled with pieces of meat and puts them into the bag one by one.", "type": "action", "weight": 3 }, { "content": "The camera zooms in to focus on the woman's hands and the bag.", "type": "camera", "weight": 3 }, { "content": "A measuring cup, a bowl and a mixer in the bowl can be seen on the countertop.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera switches to another perspective. 
The woman picks up a glass bowl filled with yellow liquid and green herbs and pours it into the plastic bag with the pieces of meat. After pouring the liquid, the woman puts the glass bowl back on the countertop and then gently adjusts the plastic bag with her hands.", "visual_elements": [ { "content": "The camera switches to another perspective.", "type": "camera", "weight": 3 }, { "content": "The woman picks up a glass bowl filled with yellow liquid and green herbs and pours it into the plastic bag with the pieces of meat.", "type": "action", "weight": 3 }, { "content": "After pouring the liquid, the woman puts the glass bowl back on the countertop.", "type": "action", "weight": 2 }, { "content": "The woman gently adjusts the plastic bag with her hands.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0628", "video_path": "YouCook2/126/vZariS25PF8/split_0.mp4", "video_source": "YouCook2", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Cooking", "duration": 10.04, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a kitchen scene. The background is orange cabinets and walls, and various bowls, dishes and cups are placed on the cabinets. A young woman wearing a light green dress with white polka dots is standing in the center of the frame. In front of her are a gas stove and a black cast-iron frying pan. She is holding a bottle of oil in her left hand and making gestures with her right hand.\nNext, the camera switches to a close-up shot, showing that she pours the oil into the frying pan. Then the woman pours a bowl of sliced onions into the pan.\nThen, the camera switches back to a panoramic view, showing the onions being stir-fried in the pan. The woman is holding the stove with her left hand and constantly stirring the onions with a wooden spatula in her right hand to make them evenly heated. She is talking while stirring.", "events": [ { "event": "At the beginning of the video, the camera is focused on a kitchen scene. The background is orange cabinets and walls, and various bowls, dishes and cups are placed on the cabinets. A young woman wearing a light green dress with white polka dots is standing in the center of the frame. In front of her are a gas stove and a black cast-iron frying pan. She is holding a bottle of oil in her left hand and making gestures with her right hand.", "visual_elements": [ { "content": "The camera is focused on a kitchen scene.", "type": "camera", "weight": 3 }, { "content": "The background is orange cabinets and walls.", "type": "scene", "weight": 2 }, { "content": "Various bowls, dishes and cups are placed on the cabinets.", "type": "attribute", "weight": 2 }, { "content": "A young woman wearing a light green dress with white polka dots is standing in the center of the frame.", "type": "scene", "weight": 3 }, { "content": "In front of her are a gas stove and a black cast-iron frying pan.", "type": "attribute", "weight": 2 }, { "content": "The woman is holding a bottle of oil in her left hand.", "type": "action", "weight": 2 }, { "content": "The woman is making gestures with her right hand.", "type": "action", "weight": 1 } ] }, { "event": "Next, the camera switches to a close-up shot, showing that she pours the oil into the frying pan. 
Then the woman pours a bowl of sliced onions into the pan.", "visual_elements": [ { "content": "The camera switches to a close-up shot.", "type": "camera", "weight": 3 }, { "content": "It shows that the woman pours the oil into the frying pan.", "type": "attribute", "weight": 2 }, { "content": "The woman pours a bowl of sliced onions into the pan.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera switches back to a panoramic view, showing the onions being stir-fried in the pan. The woman is holding the stove with her left hand and constantly stirring the onions with a wooden spatula in her right hand to make them evenly heated. She is talking while stirring.", "visual_elements": [ { "content": "The camera switches back to a panoramic view.", "type": "camera", "weight": 3 }, { "content": "It shows the onions being stir-fried in the pan.", "type": "scene", "weight": 2 }, { "content": "The woman is holding the stove with her left hand.", "type": "action", "weight": 2 }, { "content": "The woman is constantly stirring the onions with a wooden spatula in her right hand to make them evenly heated.", "type": "action", "weight": 3 }, { "content": "The woman is talking while stirring.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0629", "video_path": "Pexels/011c2cdd024393381492a115b724ee114c5afc5f7d0167f0ed01374eb22d7ccf.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 12.55, "resolution": { "width": 3840, "height": 2160 }, "caption": "The camera starts with a medium shot showing a young black woman and a small dog in her arms. The woman is smiling, wearing a pink coat, black-framed glasses, long black hair draped over her shoulders, a gray hat on her head, and a pair of black gloves on her hands. The small dog's body is brown and white, wearing a small white dress. Behind the woman is a black-framed glass window, which reflects some snow on the ground and the nearby yellow buildings. The camera remains stable.\nAs the video progresses, the camera remains focused on the woman and the puppy. The puppy turns its head from time to time. The woman smiles and gently strokes the puppy's head.", "events": [ { "event": "The camera starts with a medium shot showing a young black woman and a small dog in her arms. The woman is smiling, wearing a pink coat, black-framed glasses, long black hair draped over her shoulders, a gray hat on her head, and a pair of black gloves on her hands. The small dog's body is brown and white, wearing a small white dress. Behind the woman is a black-framed glass window, which reflects some snow on the ground and the nearby yellow buildings. 
The camera remains stable.", "visual_elements": [ { "content": "The camera starts with a medium shot.", "type": "camera", "weight": 3 }, { "content": "The picture shows a young black woman and a puppy in her arms.", "type": "attribute", "weight": 3 }, { "content": "The woman is smiling and wearing a pink coat.", "type": "attribute", "weight": 3 }, { "content": "The woman is wearing black-framed glasses, long black hair draped over her shoulders, a gray hat on her head, and a pair of black gloves on her hands.", "type": "attribute", "weight": 3 }, { "content": "The puppy's body is brown and white.", "type": "attribute", "weight": 3 }, { "content": "The puppy is wearing a small white dress.", "type": "attribute", "weight": 2 }, { "content": "Behind the woman is a black-framed glass window.", "type": "scene", "weight": 3 }, { "content": "The glass window reflects some snow on the ground and the nearby yellow buildings.", "type": "scene", "weight": 2 }, { "content": "The camera remains stable.", "type": "camera", "weight": 3 } ] }, { "event": "As the video progresses, the camera remains focused on the woman and the puppy. The puppy turns its head from time to time. The woman smiles and gently strokes the puppy's head.", "visual_elements": [ { "content": "During the video, the camera always focuses on the woman and the puppy.", "type": "camera", "weight": 3 }, { "content": "The puppy turns its head from time to time.", "type": "action", "weight": 2 }, { "content": "The woman smiles and gently strokes the puppy's head.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0630", "video_path": "Pexels/0133eb1d8963c0828b92739a6995060ae9d81578b8eca23c0b17666abec5fca9.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 11.68, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera focuses on two women standing on a rooftop. The woman on the left is wearing a white strapless long dress, with blonde short hair and her right arm covered in tattoos. The woman on the right is dressed in a black strapless long dress and has a buzz cut. The backdrop is a cityscape of buildings with neatly arranged windows.\nThe two women stand still, their hands hanging naturally at their sides, and they look down at each other's arms dangling. Subsequently, the woman on the left slowly raises her left hand, and the woman on the right also lifts her right hand. Their hands meet in the air and gently clasp. They then lift their heads to look at each other and smile. Throughout the entire process, the camera does not move and remains at a fixed angle.", "events": [ { "event": "At the beginning of the video, the camera focuses on two women standing on a rooftop. The woman on the left is wearing a white strapless long dress, with blonde short hair and her right arm covered in tattoos. The woman on the right is dressed in a black strapless long dress and has a buzz cut. 
The backdrop is a cityscape of buildings with neatly arranged windows.", "visual_elements": [ { "content": "The camera focuses on two women standing on a rooftop.", "type": "camera", "weight": 3 }, { "content": "The woman on the left side of the screen wears a white strapless long dress.", "type": "attribute", "weight": 2 }, { "content": "The woman has blonde short hair.", "type": "attribute", "weight": 2 }, { "content": "The woman's right arm is covered in tattoos.", "type": "attribute", "weight": 1 }, { "content": "The woman on the right wears a black strapless long dress.", "type": "attribute", "weight": 2 }, { "content": "The woman has a buzz cut.", "type": "attribute", "weight": 2 }, { "content": "The backdrop is a cityscape of buildings.", "type": "scene", "weight": 2 }, { "content": "The windows of the buildings are neatly arranged.", "type": "attribute", "weight": 1 } ] }, { "event": "The two women stand still, their hands hanging naturally at their sides, and they look down at each other's arms dangling. Subsequently, the woman on the left slowly raises her left hand, and the woman on the right also lifts her right hand. Their hands meet in the air and gently clasp. They then lift their heads to look at each other and smile. Throughout the entire process, the camera does not move and remains at a fixed angle.", "visual_elements": [ { "content": "The two women stand still.", "type": "attribute", "weight": 2 }, { "content": "Both of them have their hands hanging naturally at their sides.", "type": "attribute", "weight": 2 }, { "content": "Both of them look down at each other's dangling arms.", "type": "attribute", "weight": 1 }, { "content": "The woman on the left slowly raises her left hand.", "type": "action", "weight": 3 }, { "content": "The person on the right also lifts her right hand.", "type": "action", "weight": 3 }, { "content": "Both of their hands meet in the air.", "type": "attribute", "weight": 2 }, { "content": "Both of them hold hands.", "type": "action", "weight": 3 }, { "content": "Both of them lift their heads to look at each other and smile.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0631", "video_path": "Pexels/0144fd8fae67e600178d8344d5670a01e56e4519dc0a69700875b97f4ff58f30.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 17.14, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a young woman wearing a white long-sleeved shirt. She has long purple hair and blue lipstick on her lips. There are neon signs in the background, and the lighting colors are mainly pink, blue and yellow. The lighting in the background is dim.\nThen the camera begins to shake up and down. The camera is focused on the woman's face. The woman's body is facing the camera sideways, with her head facing the camera.\nThen, the camera moves down and switches to a young man in white clothes. He has a beard, a black hat, and tattoos on his arms. The man is bent forward with both hands holding an object in front of him. The woman's hand is on the man's shoulder, and her body disappears to the upper right of the picture. 
The neon lights in the background disappear to the upper right of the picture.\nFinally, the camera keeps shaking, first moving to the upper right, focusing on the man's head, and then moving to the lower left, focusing on the man's arm.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young woman wearing a white long-sleeved shirt. She has long purple hair and blue lipstick on her lips. There are neon signs in the background, and the lighting colors are mainly pink, blue and yellow. The lighting in the background is dim.", "visual_elements": [ { "content": "The camera is focused on a young woman wearing a white long-sleeved shirt.", "type": "camera", "weight": 3 }, { "content": "The woman has long purple hair and blue lipstick on her lips.", "type": "attribute", "weight": 3 }, { "content": "There are neon signs in the background.", "type": "scene", "weight": 3 }, { "content": "The lighting colors are mainly pink, blue and yellow.", "type": "scene", "weight": 2 }, { "content": "The background is dimly lit.", "type": "scene", "weight": 3 } ] }, { "event": "Then the camera begins to shake up and down. The camera is focused on the woman's face. The woman's body is facing the camera sideways, with her head facing the camera.", "visual_elements": [ { "content": "Then the camera begins to shake up and down.", "type": "camera", "weight": 3 }, { "content": "The camera is focused on the woman's face.", "type": "camera", "weight": 3 }, { "content": "The woman's body is facing the camera sideways, with her head facing the camera.", "type": "attribute", "weight": 1 } ] }, { "event": "Then, the camera moves down and switches to a young man in white clothes. He has a beard, a black hat, and tattoos on his arms. The man is bent forward with both hands holding an object in front of him. The woman's hand is on the man's shoulder, and her body disappears to the upper right of the picture. 
The neon lights in the background disappear to the upper right of the picture.", "visual_elements": [ { "content": "Then, the camera moves downward.", "type": "camera", "weight": 3 }, { "content": "The camera switches to a young man in white clothes.", "type": "camera", "weight": 3 }, { "content": "The man has a beard, a black hat, and tattoos on his arms.", "type": "attribute", "weight": 3 }, { "content": "The man is bent forward with his hands holding an object in front of him.", "type": "action", "weight": 2 }, { "content": "The woman's hand is on the man's shoulder, and her body disappears to the upper right of the screen.", "type": "attribute", "weight": 1 }, { "content": "The neon lights in the background disappear to the upper right of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera keeps shaking, first moving to the upper right, focusing on the man's head, and then moving to the lower left, focusing on the man's arm.", "visual_elements": [ { "content": "Finally, the camera keeps shaking.", "type": "camera", "weight": 3 }, { "content": "The camera moves to the upper right first, focusing on the man's head.", "type": "camera", "weight": 1 }, { "content": "The camera moves to the lower left again, focusing on the man's arm.", "type": "camera", "weight": 1 } ] } ], "n_events": 4, "n_elements": 17 }, { "index": "TUNA_0632", "video_path": "Pexels/019f7e67a03cd20565a8f72aae6ec3b333b68105a0cd5ef3e53e985a3b1ac22a.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 13.44, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focused on a black cow. The cow is eating green and yellow grass with its head down. The wind is blowing gently on the grass. The cow is large and has smooth hair. There is a red mark on the cow's ear. In the background is a wooden fence, behind which is a grove of trees.\nAs the video progresses, the camera moves slightly to the left. The cow moves forward a few times while lowering its head to eat grass. The cow's ears occasionally move.", "events": [ { "event": "The video begins with the camera focused on a black cow. The cow is eating green and yellow grass with its head down. The wind is blowing gently on the grass. The cow is large and has smooth hair. There is a red mark on the cow's ear. In the background is a wooden fence, behind which is a grove of trees.", "visual_elements": [ { "content": "The camera is focused on a black cow.", "type": "camera", "weight": 3 }, { "content": "The cow is eating grass with its head down.", "type": "action", "weight": 3 }, { "content": "The grass is green and yellow.", "type": "scene", "weight": 2 }, { "content": "The wind blows the grass gently.", "type": "scene", "weight": 1 }, { "content": "The cow is large and has smooth hair.", "type": "attribute", "weight": 2 }, { "content": "There is a red mark on the cow's ear.", "type": "attribute", "weight": 2 }, { "content": "The background is a wooden fence, and behind the fence is a grove of trees.", "type": "scene", "weight": 3 } ] }, { "event": "As the video progresses, the camera moves slightly to the left. The cow moves forward a few times while lowering its head to eat grass. 
The cow's ears occasionally move.", "visual_elements": [ { "content": "As the video progresses, the camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The cow moves forward a few times while eating grass with its head down.", "type": "action", "weight": 3 }, { "content": "The cow's ears occasionally move a few times.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0633", "video_path": "Pexels/01a32c65ff0f16d0850b946eb269b4c293f4e3e5fe5509c459d60fc88d97f0a5.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 13.03, "resolution": { "width": 2158, "height": 3840 }, "caption": "At the beginning of the video, the camera focuses on a row of small boats moored on the sea surface, which are secured to the shore with ropes. The boats gently sway with the ripples of the sea. Each boat flies a red flag. The boats are mainly painted in white and wood colors, with some logos and numbers on their hulls. The boat on the far right of the screen also displays a Turkish flag, which flaps in the wind.\nIn the background, a pleasure cruise ship sails on the sea. Across the river, buildings are densely packed on the hillsides with undulating terrain. In the distance, there stands a tall tower with a slender, pointed spire piercing through the clouds. The sky is covered with thick clouds, appearing grayish-white. Throughout the sequence, the camera gently sways left and right, and the perspective remains fixed on the row of moored boats.", "events": [ { "event": "At the beginning of the video, the camera focuses on a row of small boats moored on the sea surface, which are secured to the shore with ropes. The boats gently sway with the ripples of the sea. Each boat flies a red flag. The boats are mainly painted in white and wood colors, with some logos and numbers on their hulls. The boat on the far right of the screen also displays a Turkish flag, which flaps in the wind.", "visual_elements": [ { "content": "The camera focuses on a row of small boats moored on the sea surface.", "type": "camera", "weight": 3 }, { "content": "The boats are secured to the shore with ropes.", "type": "attribute", "weight": 1 }, { "content": "The boats  gently sway with the ripples of the sea.", "type": "attribute", "weight": 3 }, { "content": "Each boat flies a red flag.", "type": "attribute", "weight": 2 }, { "content": "The boats are mainly painted in white and wood colors.", "type": "attribute", "weight": 2 }, { "content": "Some logos and numbers are on the hulls of the boats.", "type": "attribute", "weight": 1 }, { "content": "The boat on the far right displays a Turkish flag.", "type": "attribute", "weight": 2 }, { "content": "The flag flaps in the wind.", "type": "attribute", "weight": 2 } ] }, { "event": "In the background, a pleasure cruise ship sails on the sea. Across the river, buildings are densely packed on the hillsides with undulating terrain. In the distance, there stands a tall tower with a slender, pointed spire piercing through the clouds. The sky is covered with thick clouds, appearing grayish-white. 
Throughout the sequence, the camera gently sways left and right, and the perspective remains fixed on the row of moored boats.", "visual_elements": [ { "content": "In the background, a pleasure cruise ship sails on the sea.", "type": "scene", "weight": 1 }, { "content": "Buildings are densely packed on the undulating hillsides across the river.", "type": "scene", "weight": 2 }, { "content": "A tall tower stands in the distance.", "type": "scene", "weight": 2 }, { "content": "The tower has a slender, pointed spire.", "type": "attribute", "weight": 1 }, { "content": "The sky is covered with thick clouds.", "type": "scene", "weight": 2 }, { "content": "The sky appears grayish-white.", "type": "attribute", "weight": 1 }, { "content": "Throughout the sequence, the camera gently sways left and right.", "type": "camera", "weight": 3 }, { "content": "The camera perspective remains fixed on the row of moored boats.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0634", "video_path": "Pexels/028601fed528278368c3f37633f75db3f672d22860335fa6e30c30b6a7a384fd.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 16.2, "resolution": { "width": 1080, "height": 1920 }, "caption": "The video begins with the camera focused on a calico cat lying on a brown-grey sofa. The cat's face is mainly orange and white, with a hint of black next to its left eye. The cat's eyes are slightly half-closed, and its ears are erect, shaking slightly from time to time. There is a hand on the right side of the screen, with a small snack in the palm of the hand, which is light yellow in color and irregular in shape. The background is blurred. On the left is a grey cushion. The camera focuses on the cat's face.\nThe camera shakes slightly, and the cat leans forward slightly, its nose almost close to the palm, and it sniffs carefully before licking the food. After the cat finishes eating the cat food, it shakes its head and licks its mouth. The palm slowly moves away from the picture.", "events": [ { "event": "The video begins with the camera focused on a calico cat lying on a brown-grey sofa. The cat's face is mainly orange and white, with a hint of black next to its left eye. The cat's eyes are slightly half-closed, and its ears are erect, shaking slightly from time to time. There is a hand on the right side of the screen, with a small snack in the palm of the hand, which is light yellow in color and irregular in shape. The background is blurred. On the left is a grey cushion. 
The camera focuses on the cat's face.", "visual_elements": [ { "content": "The camera is aimed at a tricolor cat lying on a brown-gray sofa.", "type": "camera", "weight": 3 }, { "content": "The cat's face is mainly orange and white, with a little black next to the left eye.", "type": "attribute", "weight": 3 }, { "content": "The cat's eyes are slightly half-closed, and its ears are erect, shaking slightly from time to time.", "type": "attribute", "weight": 2 }, { "content": "There is a hand on the right side of the picture.", "type": "attribute", "weight": 3 }, { "content": "A small snack is placed in the palm of the hand, which is light yellow in color and irregular in shape.", "type": "attribute", "weight": 2 }, { "content": "The background is blurred.", "type": "scene", "weight": 3 }, { "content": "There is a gray cushion on the left side of the picture in the background.", "type": "scene", "weight": 2 }, { "content": "The camera focuses on the cat's face.", "type": "camera", "weight": 3 } ] }, { "event": "The camera shakes slightly, and the cat leans forward slightly, its nose almost close to the palm, and it sniffs carefully before licking the food. After the cat finishes eating the cat food, it shakes its head and licks its mouth. The palm slowly moves away from the picture.", "visual_elements": [ { "content": "The camera shakes slightly.", "type": "action", "weight": 2 }, { "content": "The cat's body leans forward slightly, its nose almost close to the palm, and it begins to lick after sniffing carefully.", "type": "action", "weight": 2 }, { "content": "After the cat finishes eating the cat food, it shakes its head and licks its mouth.", "type": "action", "weight": 2 }, { "content": "The palm slowly moves away from the picture.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0635", "video_path": "Pexels/03384074621be81e7de0c9631150a9154ce08e3c9667003dd5ab3e88ad48162d.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 15.28, "resolution": { "width": 2560, "height": 1440 }, "caption": "At the beginning of the video, the camera overlooks the entire scene from a high altitude, revealing a wide river winding through the city. Dense buildings line both banks of the river, with low-rise cottages being the majority. In the distance, a few scattered high-rise buildings stand out.\nThe camera slowly descends, bringing into view a cluster of distinctive buildings on the left bank of the river. The roofs of these buildings are brightly colored in red, adorned with golden spires and carvings.\nAs the camera continues its descent, the greenbelts and smaller buildings surrounding the distinctive architectural cluster come into scene. The roofs of these buildings are of various colors, including red, blue, and white. At the bottom of the screen, a small plaza is visible, with a platform at its center displaying a golden sculpture. Along the riverbank, a red pathway is seen, with a few people walking on it. Black fences surround the pathway, and streetlights are intermittently lined up alongside it.", "events": [ { "event": "At the beginning of the video, the camera overlooks the entire scene from a high altitude, revealing a wide river winding through the city. Dense buildings line both banks of the river, with low-rise cottages being the majority. 
In the distance, a few scattered high-rise buildings stand out.", "visual_elements": [ { "content": "The camera overlooks the entire scene from a high altitude.", "type": "camera", "weight": 3 }, { "content": "A wide river winds through the city.", "type": "attribute", "weight": 3 }, { "content": "Dense buildings line both banks of the river.", "type": "attribute", "weight": 3 }, { "content": "Low-rise cottages are the majority.", "type": "attribute", "weight": 2 }, { "content": "A few scattered high-rise buildings stand out in the distance.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera slowly descends, bringing into view a cluster of distinctive buildings on the left bank of the river. The roofs of these buildings are brightly colored in red, adorned with golden spires and carvings.", "visual_elements": [ { "content": "The camera slowly pans down.", "type": "camera", "weight": 3 }, { "content": "A cluster of distinctive buildings on the left bank of the river comes into view.", "type": "attribute", "weight": 3 }, { "content": "The roofs of these buildings are brightly colored in red.", "type": "attribute", "weight": 2 }, { "content": "Adorned with golden spires and carvings.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera continues its descent, the greenbelts and smaller buildings surrounding the distinctive architectural cluster come into scene. The roofs of these buildings are of various colors, including red, blue, and white. At the bottom of the screen, a small plaza is visible, with a platform at its center displaying a golden sculpture. Along the riverbank, a red pathway is seen, with a few people walking on it. Black fences surround the pathway, and streetlights are intermittently lined up alongside it.", "visual_elements": [ { "content": "The camera continues to pan down.", "type": "camera", "weight": 3 }, { "content": "Greenbelts and smaller buildings surrounding the distinctive architectural cluster come into scene.", "type": "attribute", "weight": 3 }, { "content": "The roofs of these buildings are of various colors, including red, blue, and white.", "type": "attribute", "weight": 2 }, { "content": "A small plaza is visible at the bottom of the screen.", "type": "attribute", "weight": 2 }, { "content": "There is a platform at the center of the plaza.", "type": "attribute", "weight": 2 }, { "content": "There is a platform with a golden sculpture displayed on it.", "type": "attribute", "weight": 2 }, { "content": "A red pathway along the riverbank.", "type": "attribute", "weight": 2 }, { "content": "A few people walking on the pathway.", "type": "action", "weight": 1 }, { "content": "Black fences surround the pathway.", "type": "attribute", "weight": 1 }, { "content": "Streetlights are intermittently lined up alongside the pathway.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0636", "video_path": "Pexels/0363fe201abcabba972d41b8215c9d935eac024acc67bdd58cd9810f68521949.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 11.4, "resolution": { "width": 2048, "height": 922 }, "caption": "The video begins with the camera focusing on a port water area, where the calm surface of the water reflects the surrounding scenery. In the left rear of the scene, there is a large gray-white ship with noticeable text markings on its hull. Behind the ship, there are several large cranes. 
Beside the ship, there is an abandoned building with a rusted red roof. The port is surrounded by yellow trees, and behind the trees, there are several high-rise buildings.\nThe camera slowly pans downward to the left. In the lower right corner of the scene, a man dressed in dark clothes and wearing a yellow hat is slowly moving toward the shore. The camera stops, and the man continues walking forward until he reaches the water's edge, where he gazes at the surface of the water.", "events": [ { "event": "The video begins with the camera focusing on a port water area, where the calm surface of the water reflects the surrounding scenery. In the left rear of the scene, there is a large gray-white ship with noticeable text markings on its hull. Behind the ship, there are several large cranes. Beside the ship, there is an abandoned building with a rusted red roof. The port is surrounded by yellow trees, and behind the trees, there are several high-rise buildings.", "visual_elements": [ { "content": "The video begins with the camera focusing on a port water area.", "type": "camera", "weight": 3 }, { "content": "The calm surface of the water reflects the surrounding scenery.", "type": "attribute", "weight": 2 }, { "content": "In the left rear of the scene, there is a large gray-white ship.", "type": "attribute", "weight": 3 }, { "content": "The hull of the ship has noticeable text markings.", "type": "attribute", "weight": 2 }, { "content": "Behind the ship, there are several large cranes.", "type": "attribute", "weight": 2 }, { "content": "Beside the ship, there is an abandoned building.", "type": "attribute", "weight": 2 }, { "content": "The red roof of the building is rusted.", "type": "attribute", "weight": 2 }, { "content": "The port is surrounded by withered yellow trees.", "type": "scene", "weight": 2 }, { "content": "Behind the trees, there are several high-rise buildings.", "type": "scene", "weight": 1 } ] }, { "event": "The camera slowly pans downward to the left. In the lower right corner of the scene, a man dressed in dark clothes and wearing a yellow hat is slowly moving toward the shore. The camera stops, and the man continues walking forward until he reaches the water's edge, where he gazes at the surface of the water.", "visual_elements": [ { "content": "The camera slowly pans downward to the left.", "type": "camera", "weight": 3 }, { "content": "In the lower right corner of the scene, a man is dressed in dark clothes.", "type": "attribute", "weight": 1 }, { "content": "The man wears a yellow hat.", "type": "attribute", "weight": 1 }, { "content": "The man slowly moves toward the shore.", "type": "action", "weight": 2 }, { "content": "The camera stops.", "type": "camera", "weight": 3 }, { "content": "The man continues walking forward until he reaches the shore.", "type": "action", "weight": 2 }, { "content": "The man gazes at the surface of the water.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0637", "video_path": "Pexels/037cc5439b117d2619b2023f54312a5038a29fc3a60f94cd1d8932556b832537.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 14.0, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a blue sky with some white clouds floating in the sky. On the left side of the frame is a pine tree with branches extending to the right. 
On the right side of the frame is a red leaf tree with some green trees and bushes in the background.\nThe camera moves slowly to the right as more red-leafed trees enter the frame. The camera moves to the left and all the branches of the pine tree enter the frame.", "events": [ { "event": "The video begins with the camera focusing on a blue sky with some white clouds floating in the sky. On the left side of the frame is a pine tree with branches extending to the right. On the right side of the frame is a red leaf tree with some green trees and bushes in the background.", "visual_elements": [ { "content": "The camera focuses on a blue sky.", "type": "camera", "weight": 3 }, { "content": "There are some white clouds floating in the sky.", "type": "scene", "weight": 3 }, { "content": "On the left side of the picture is a pine tree.", "type": "attribute", "weight": 2 }, { "content": "The branches of the tree extend to the right.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the picture is a red leaf tree.", "type": "scene", "weight": 3 }, { "content": "In the background are some green trees and bushes.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves slowly to the right as more red-leafed trees enter the frame. The camera moves to the left and all the branches of the pine tree enter the frame.", "visual_elements": [ { "content": "The camera moves slowly to the right.", "type": "camera", "weight": 3 }, { "content": "More red leaf trees enter the frame.", "type": "scene", "weight": 2 }, { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "All the branches of the pine tree come into the frame.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0638", "video_path": "Pexels/0396f4f3591a5827515389d716a074fcd61a925f0052e85bb2b8fa88b3ca20de.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 10.01, "resolution": { "width": 2160, "height": 3840 }, "caption": "The camera focuses on a Black man. He wears a light gray short-sleeved T-shirt and blue jeans, standing in front of the railing of the bridge. He holds a cardboard sign that says \"Human Rights!\"\nIn the background, a tall building and several other structures are visible, with the sky appearing grayish-blue. In the foreground, two pedestrians quickly pass by the camera, and through the bridge's railing, vehicles can be seen driving on the road below.", "events": [ { "event": "The camera focuses on a Black man. He wears a light gray short-sleeved T-shirt and blue jeans, standing in front of the railing of the bridge. He holds a cardboard sign that says \"Human Rights!\"", "visual_elements": [ { "content": "The camera focuses on a Black man.", "type": "camera", "weight": 3 }, { "content": "He wears a light gray short-sleeved T-shirt and blue jeans.", "type": "attribute", "weight": 2 }, { "content": "He stands in front of the railing of the bridge.", "type": "attribute", "weight": 2 }, { "content": "The man holds a cardboard sign with both hands.", "type": "attribute", "weight": 2 }, { "content": "The cardboard sign says \"Human Rights!\"", "type": "attribute", "weight": 2 } ] }, { "event": "In the background, a tall building and several other structures are visible, with the sky appearing grayish-blue. 
In the foreground, two pedestrians quickly pass by the camera, and through the bridge's railing, vehicles can be seen driving on the road below.", "visual_elements": [ { "content": "In the background, a tall building and several other structures are visible.", "type": "scene", "weight": 2 }, { "content": "The sky appears grayish-blue.", "type": "scene", "weight": 2 }, { "content": "In the foreground, two pedestrians quickly pass by the camera.", "type": "scene", "weight": 1 }, { "content": "Through the bridge's railing, vehicles can be seen driving on the road below.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0639", "video_path": "Pexels/3393b3db1fa35b3fc9a739d9dea5b76d37683dde05e699c8d60e2d427a358f81.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 10.01, "resolution": { "width": 2160, "height": 3840 }, "caption": "The video begins with the camera focused on a dog wearing a red harness with light brown and white fur. The dog's mouth is slightly open with its tongue hanging out. On the right side of the screen is a woman crouching in tight black sportswear and grey sneakers. The woman holds a silver leash on the dog's back with her right hand and gently strokes the dog's neck with her left hand, on which she wears a gold bracelet. In the background is a blue sky and a silver wire fence, with some tall buildings visible in the distance.\nThe camera then moves slightly to the right, the dog's tail disappearing from the frame, and the woman's movements become more obvious, first stroking the dog's neck and then its head and back.\nFinally, the camera moves slightly to the left. The dog walks out of the frame from the left side, and then the woman stands up and pulls the silver leash to the left front of the frame. The red handle of the leash appears. The camera finally captures the woman's left hand, left leg and one of the dog's ears. As the woman stands up, more floors are revealed on the right side of the frame, and the tops of the green plants under the wire fence also appear.", "events": [ { "event": "The video begins with the camera focused on a dog wearing a red harness with light brown and white fur. The dog's mouth is slightly open with its tongue hanging out. On the right side of the screen is a woman crouching in tight black sportswear and grey sneakers. The woman holds a silver leash on the dog's back with her right hand and gently strokes the dog's neck with her left hand, on which she wears a gold bracelet. 
In the background is a blue sky and a silver wire fence, with some tall buildings visible in the distance.", "visual_elements": [ { "content": "The camera is pointed at a dog wearing a red harness.", "type": "camera", "weight": 3 }, { "content": "The dog's fur is light brown and white.", "type": "attribute", "weight": 3 }, { "content": "The dog's mouth is slightly open, and its tongue is sticking out.", "type": "action", "weight": 2 }, { "content": "A woman in tight black sportswear and gray sneakers is squatting on the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The woman holds a silver leash in her right hand and places it on the dog's back.", "type": "action", "weight": 2 }, { "content": "The woman wears a gold bracelet on her left hand and gently strokes the dog's neck.", "type": "action", "weight": 2 }, { "content": "The background is a blue sky and a silver wire fence.", "type": "scene", "weight": 3 }, { "content": "There are some tall buildings in the distance.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then moves slightly to the right, the dog's tail disappearing from the frame, and the woman's movements become more obvious, first stroking the dog's neck and then its head and back.", "visual_elements": [ { "content": "Then, the camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The dog's tail disappears from the screen.", "type": "attribute", "weight": 1 }, { "content": "The woman's movements become more obvious, and she strokes the dog's neck first, then the dog's head and back.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera moves slightly to the left. The dog walks out of the frame from the left side, and then the woman stands up and pulls the silver leash to the left front of the frame. The red handle of the leash appears. The camera finally captures the woman's left hand, left leg and one of the dog's ears. As the woman stands up, more floors are revealed on the right side of the frame, and the tops of the green plants under the wire fence also appear.", "visual_elements": [ { "content": "Finally, the camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The dog walks out of the screen from the left side.", "type": "action", "weight": 2 }, { "content": "The woman stands up and pulls the silver leash to the front left of the screen.", "type": "action", "weight": 2 }, { "content": "The red handle of the leash appears.", "type": "attribute", "weight": 1 }, { "content": "The camera finally captures the woman's left hand, left leg and one of the dog's ears.", "type": "attribute", "weight": 1 }, { "content": "In the background, as the woman stands up, more floors are shown on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "The tops of the green plants under the wire fence also appear.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0640", "video_path": "Pexels/33e8e6a976951242757d8caf1635f15ee551e9790de059de5774ac4c8292d578.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 12.56, "resolution": { "width": 3840, "height": 2160 }, "caption": "At the beginning of the video, the camera is focused on an abandoned old car parked in a desolate mountain area. The front part of the car has been severely damaged, with the front end almost completely exposed, and the car body is covered with rust and peeling paint. 
The background is a vast expanse of hills covered with sparse vegetation. The sky is clear, presenting a deep blue color.\nThen, the camera slowly moves around the front of the car from right to left, gradually showing the side of the car. The window glass of the car is missing and the inside of the car is empty. In the background, the wild grass around the car can be seen swaying in the wind.", "events": [ { "event": "At the beginning of the video, the camera is focused on an abandoned old car parked in a desolate mountain area. The front part of the car has been severely damaged, with the front end almost completely exposed, and the car body is covered with rust and peeling paint. The background is a vast expanse of hills covered with sparse vegetation. The sky is clear, presenting a deep blue color.", "visual_elements": [ { "content": "The camera is focused on an abandoned old car.", "type": "camera", "weight": 3 }, { "content": "The car is parked in a desolate mountain area.", "type": "attribute", "weight": 2 }, { "content": "The front part of the car has been severely damaged.", "type": "attribute", "weight": 3 }, { "content": "The front end of the car is almost completely exposed.", "type": "attribute", "weight": 3 }, { "content": "The car body is covered with rust and peeling paint.", "type": "attribute", "weight": 3 }, { "content": "The background is a vast expanse of hills.", "type": "scene", "weight": 2 }, { "content": "The hills are covered with sparse vegetation.", "type": "scene", "weight": 2 }, { "content": "The sky is clear, presenting a deep blue color.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera slowly moves around the front of the car from right to left, gradually showing the side of the car. The window glass of the car is missing and the inside of the car is empty. In the background, the wild grass around the car can be seen swaying in the wind.", "visual_elements": [ { "content": "The camera slowly moves around the front of the car from right to left.", "type": "camera", "weight": 3 }, { "content": "The camera gradually shows the side of the car.", "type": "attribute", "weight": 2 }, { "content": "The window glass of the car is missing.", "type": "attribute", "weight": 2 }, { "content": "The inside of the car is empty.", "type": "attribute", "weight": 2 }, { "content": "In the background, the wild grass around the car can be seen swaying in the wind.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0641", "video_path": "Pexels/34ca0b34404cf57c5f6cecc171479954caf4ee1f3f7cee9a625c5fd8bb268a92.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 11.87, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with a young white woman sitting on the ground with a husky in her arms. The husky is black and white. The woman is wearing a black top with white letters and has silver hair. On the right side of the screen are the legs of a person wearing light jeans and brown boots, who is sitting by the door of a white vehicle. There are some small stones and branches on the ground in the background, and there are several fallen tree trunks in the distance.\nThe camera then moves slightly to the left, always keeping the front view of the woman and the dog in her arms. The woman gently strokes the husky's head with her left hand and hugs the dog's body with her right hand. The dog's eyes are slightly closed. 
The legs on the right side of the picture slowly rise and gradually move away from the camera.\nFinally, the camera moves back slightly. The woman holds the dog's chin with both hands, and the dog's black clothing with a white tag is revealed. The legs of a person on the right side of the frame disappear completely.", "events": [ { "event": "The video begins with a young white woman sitting on the ground with a husky in her arms. The husky is black and white. The woman is wearing a black top with white letters and has silver hair. On the right side of the screen are the legs of a person wearing light jeans and brown boots, who is sitting by the door of a white vehicle. There are some small stones and branches on the ground in the background, and there are several fallen tree trunks in the distance.", "visual_elements": [ { "content": "The camera is focused on a young white woman sitting on the ground and a husky dog in her arms.", "type": "camera", "weight": 3 }, { "content": "The husky dog is black and white.", "type": "attribute", "weight": 3 }, { "content": "The woman is wearing a black top with white letters and has silver-white hair.", "type": "attribute", "weight": 2 }, { "content": "The legs of a person wearing light jeans and brown boots are on the right side of the screen.", "type": "attribute", "weight": 1 }, { "content": "The person on the right side of the screen is sitting by the door of a white vehicle.", "type": "action", "weight": 1 }, { "content": "There are some small stones and branches on the ground in the background.", "type": "scene", "weight": 1 }, { "content": "There are several fallen tree trunks in the distance.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then moves slightly to the left, always keeping the front view of the woman and the dog in her arms. The woman gently strokes the husky's head with her left hand and hugs the dog's body with her right hand. The dog's eyes are slightly closed. The legs on the right side of the picture slowly rise and gradually move away from the camera.", "visual_elements": [ { "content": "Then, the camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The camera always stays in front of the woman and the dog in her arms.", "type": "camera", "weight": 3 }, { "content": "The woman gently strokes the head of the husky with her left hand and hugs the dog's body with her right hand.", "type": "action", "weight": 3 }, { "content": "The dog's eyes are slightly closed.", "type": "action", "weight": 2 }, { "content": "The legs on the right side of the screen slowly rise and gradually leave the camera.", "type": "action", "weight": 1 } ] }, { "event": "Finally, the camera moves back slightly. The woman holds the dog's chin with both hands, and the dog's black clothing with a white tag is revealed. 
The legs of a person on the right side of the frame disappear completely.", "visual_elements": [ { "content": "Finally, the camera moves back slightly.", "type": "camera", "weight": 3 }, { "content": "The woman holds the dog's chin with both hands.", "type": "action", "weight": 3 }, { "content": "The black clothes with white tags on the dog are revealed.", "type": "attribute", "weight": 1 }, { "content": "The legs of a person on the right side of the screen disappear completely.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0642", "video_path": "Pexels/354c38f558d00ca5aa1429c4b07a8f2a3c2d859c62cf64c647adc2de88762b03.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 13.36, "resolution": { "width": 3840, "height": 2160 }, "caption": "At the beginning of the video, the camera overlooks a beach from a high altitude, the sea water presents a clear blue-green color, and the waves gently beat the beach. A hill appears next to the beach, covered with green vegetation, with many white rocks on the left side of the hill, and many white houses spread out on the right side of the hill, which are arranged in a staggered pattern.\nThe camera continues to move to the right, showing the view high above the hill, with the hills clearly visible in the distance.", "events": [ { "event": "At the beginning of the video, the camera overlooks a beach from a high altitude, the sea water presents a clear blue-green color, and the waves gently beat the beach. A hill appears next to the beach, covered with green vegetation, with many white rocks on the left side of the hill, and many white houses spread out on the right side of the hill, which are arranged in a staggered pattern.", "visual_elements": [ { "content": "The camera looks down on a beach from above.", "type": "camera", "weight": 3 }, { "content": "The water is a clear blue-green color.", "type": "scene", "weight": 2 }, { "content": "The waves are gently lapping at the sand.", "type": "scene", "weight": 2 }, { "content": "A hill appears next to the beach.", "type": "attribute", "weight": 2 }, { "content": "The hill is covered with green vegetation.", "type": "attribute", "weight": 2 }, { "content": "White rocks on the left side of the hill.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the hill, there are many white houses.", "type": "attribute", "weight": 2 }, { "content": "These houses are arranged in a row on the hill.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move to the right, showing the view high above the hill, with the hills clearly visible in the distance.", "visual_elements": [ { "content": "The camera continues to move to the right, showing the view from the high side of the hill", "type": "camera", "weight": 3 }, { "content": "The hills are clearly visible in the distance.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0643", "video_path": "Pexels/3579bdb0722df7d5fd186cd3a401d9fba721cb0f1b5949875d8ef4ef7f592ec1.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 19.05, "resolution": { "width": 4096, "height": 2160 }, "caption": "The video begins with the camera looking down from a high altitude, showing a body of water with different colors of water plants and algae that form various shapes and patterns. 
The camera begins to move to the left, showing more of the water and the distribution of water plants. The colors of the water plants are mainly green and yellow, with some areas covered with darker water plants.\nAs the camera continues to rotate to the left, a larger body of water appears with some white huts with blue roofs and floating objects. The camera gradually moves up to show more of the water and surroundings. At the edge of the water are some small islands and areas covered in water and grass, with dense vegetation on the islands.\nToward the end of the video, the camera slowly moves upward to show a full view of the entire body of water. There are various shapes and colors of grasses and algae, and the wetlands are a mix of dark and light greens, where organisms and buildings meet.", "events": [ { "event": "The video begins with the camera looking down from a high altitude, showing a body of water with different colors of water plants and algae that form various shapes and patterns. The camera begins to move to the left, showing more of the water and the distribution of water plants. The colors of the water plants are mainly green and yellow, with some areas covered with darker water plants.", "visual_elements": [ { "content": "The camera looks down from above to show a body of water.", "type": "camera", "weight": 3 }, { "content": "The water is covered with different colors of water plants and algae, creating various shapes and patterns.", "type": "attribute", "weight": 2 }, { "content": "The camera starts to move to the left to show more of the water and the distribution of the water plants.", "type": "camera", "weight": 3 }, { "content": "The colors of the water plants are mainly green and yellow.", "type": "attribute", "weight": 2 }, { "content": "Some areas are covered with dark colored water plants.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera continues to rotate to the left, a larger body of water appears with some white huts with blue roofs and floating objects. The camera gradually moves up to show more of the water and surroundings. At the edge of the water are some small islands and areas covered in water and grass, with dense vegetation on the islands.", "visual_elements": [ { "content": "The camera continues to rotate to the left and a larger area of water appears.", "type": "camera", "weight": 3 }, { "content": "There are some white huts with blue roofs and floating objects on the surface of the water.", "type": "attribute", "weight": 2 }, { "content": "The camera gradually moves upwards.", "type": "camera", "weight": 3 }, { "content": "Showing more of the water and surroundings.", "type": "scene", "weight": 3 }, { "content": "At the edge of the water are small islands and areas covered with water plants.", "type": "attribute", "weight": 2 }, { "content": "The islands have dense vegetation.", "type": "attribute", "weight": 2 } ] }, { "event": "Toward the end of the video, the camera slowly moves upward to show a full view of the entire body of water. 
There are various shapes and colors of grasses and algae, and the wetlands are a mix of dark and light greens, where organisms and buildings meet.", "visual_elements": [ { "content": "The camera slowly moves upwards to show the whole water area in its entirety.", "type": "attribute", "weight": 2 }, { "content": "Wetland waters, creatures and buildings meet in a mix of dark and light greens.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0644", "video_path": "Pexels/35d339beb527deea90979302833bde4cc1da0452d90b44e93ea10b1a34a95707.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 16.24, "resolution": { "width": 4096, "height": 2160 }, "caption": "The video begins with the camera focusing on a young white man sitting on a green sofa. The man has black stubble and wears a light blue long-sleeved shirt and brown pants. On the man's knees is a silver laptop with a circular logo on it. On the right side of the screen, next to the man, there is a yellow dog sitting on the sofa. The dog has a red collar around its neck. The man works on the computer while stroking the dog next to him. There are two yellow cushions on the sofa, and on the left side of the screen is a large window with green trees and some buildings outside. On the windowsill is a small black vase with a few decorative branches. Under the windowsill is a row of white radiators. There is also a wooden chair in front of the sofa on the left side of the screen.\nThe camera pans left and right. The dog sniffs the man's hands and starts to stand up and sniff the keyboard on the man's lap. The man uses his left hand to stop the dog from approaching the computer. The man's right hand remains on the keyboard.", "events": [ { "event": "The video begins with the camera focusing on a young white man sitting on a green sofa. The man has black stubble and wears a light blue long-sleeved shirt and brown pants. On the man's knees is a silver laptop with a circular logo on it. On the right side of the screen, next to the man, there is a yellow dog sitting on the sofa. The dog has a red collar around its neck. The man works on the computer while stroking the dog next to him. There are two yellow cushions on the sofa, and on the left side of the screen is a large window with green trees and some buildings outside. On the windowsill is a small black vase with a few decorative branches. Under the windowsill is a row of white radiators. 
There is also a wooden chair in front of the sofa on the left side of the screen.", "visual_elements": [ { "content": "The camera is focused on a young white man sitting on a green sofa.", "type": "camera", "weight": 3 }, { "content": "The man has black stubble and wears a light blue long-sleeved shirt and brown pants.", "type": "attribute", "weight": 2 }, { "content": "There is a silver laptop on the man's knees.", "type": "attribute", "weight": 2 }, { "content": "There is a round logo on the computer.", "type": "attribute", "weight": 1 }, { "content": "On the right side of the screen, next to the man, there is a yellow dog sitting on the sofa.", "type": "attribute", "weight": 2 }, { "content": "There is a red collar around the dog's neck.", "type": "attribute", "weight": 1 }, { "content": "The man is working on the computer while stroking the dog next to him.", "type": "action", "weight": 3 }, { "content": "There are two yellow cushions on the sofa.", "type": "attribute", "weight": 1 }, { "content": "There is a large window on the left side of the screen.", "type": "scene", "weight": 3 }, { "content": "There are green trees and some buildings outside the window.", "type": "scene", "weight": 1 }, { "content": "There is a small black vase on the windowsill with a few decorative branches in it.", "type": "scene", "weight": 1 }, { "content": "There is a row of white radiators under the windowsill.", "type": "scene", "weight": 2 }, { "content": "There is also a wooden chair in front of the sofa on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "The camera pans left and right. The dog sniffs the man's hands and starts to stand up and sniff the keyboard on the man's lap. The man uses his left hand to stop the dog from approaching the computer. The man's right hand remains on the keyboard.", "visual_elements": [ { "content": "The camera pans left and right.", "type": "camera", "weight": 3 }, { "content": "The dog sniffs the man's hands and starts to stand up and sniff the keyboard on the man's lap.", "type": "action", "weight": 2 }, { "content": "The man uses his left hand to stop the dog from approaching the computer.", "type": "action", "weight": 3 }, { "content": "The man's right hand remains on the keyboard.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0645", "video_path": "Pexels/36751b4619ad99f8f410a5c75697fdb0d6c517c3092db3e31bcddc52d78b1237.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 11.24, "resolution": { "width": 2562, "height": 1440 }, "caption": "At the beginning of the video, the camera looks down at the entire port area from a high altitude. In the center of the picture, there is a large green cargo ship docked at the dock. There are multiple cargo holds on the deck of the cargo ship, and the lid of one of the cargo holds is open. The cargo holds are blue on the outside and red on the inside, with some yellow markings on the cargo holds. There is a white mast on the bow part. There is a white operating room at the stern part, and the top of the operating room is red. There is a long conveyor belt on the dock on the left side of the picture, connected to a large green crane. The crane has a complex structure, with four supporting legs at the bottom and an operating room on the top. The conveyor belt extends from the top of the crane and leads directly to the open cargo hold of the cargo ship. 
There are rows of brown-red freight train carriages under the crane. On the right side of the picture is a wide light green water surface. In the distance, there are some warehouse buildings in the background, and the roofs of these buildings are white.\nThe camera moves down, toward the center of the cargo ship. The crane and bow gradually disappear to the left of the frame, and the stern gradually disappears to the top of the frame.\nAt the end of the video, the camera moves forward slightly. The pipe extending from the left side of the screen is adding fine sand into the open cargo hold. After the fine sand enters the cargo hold, it accumulates in a circular shape. On the right side of the screen, sunlight shines on the water, reflecting a golden light.", "events": [ { "event": "At the beginning of the video, the camera looks down at the entire port area from a high altitude. In the center of the picture, there is a large green cargo ship docked at the dock. There are multiple cargo holds on the deck of the cargo ship, and the lid of one of the cargo holds is open. The cargo holds are blue on the outside and red on the inside, with some yellow markings on the cargo holds. There is a white mast on the bow part. There is a white operating room at the stern part, and the top of the operating room is red. There is a long conveyor belt on the dock on the left side of the picture, connected to a large green crane. The crane has a complex structure, with four supporting legs at the bottom and an operating room on the top. The conveyor belt extends from the top of the crane and leads directly to the open cargo hold of the cargo ship. There are rows of brown-red freight train carriages under the crane. On the right side of the picture is a wide light green water surface. 
In the distance, there are some warehouse buildings in the background, and the roofs of these buildings are white.", "visual_elements": [ { "content": "At the beginning of the video, the camera looks down at the entire port area from a high altitude.", "type": "camera", "weight": 3 }, { "content": "In the center of the picture, there is a large green cargo ship docked at the dock.", "type": "attribute", "weight": 3 }, { "content": "There are multiple cargo holds on the deck of the cargo ship, and the lid of one of the cargo holds is open.", "type": "attribute", "weight": 2 }, { "content": "The cargo hold is blue on the outside and red on the inside, with some yellow markings on the cargo hold.", "type": "attribute", "weight": 1 }, { "content": "There is a white mast at the bow.", "type": "attribute", "weight": 2 }, { "content": "There is a white operating room at the stern.", "type": "attribute", "weight": 2 }, { "content": "The top of the operating room is red.", "type": "attribute", "weight": 1 }, { "content": "There is a long conveyor belt on the dock on the left side of the picture, connected to a large green crane.", "type": "attribute", "weight": 2 }, { "content": "The crane has a complex structure, with four supporting legs at the bottom and an operating room at the top.", "type": "attribute", "weight": 1 }, { "content": "The conveyor belt extends from the top of the crane and leads directly to the open cargo hold of the cargo ship.", "type": "attribute", "weight": 1 }, { "content": "There are rows of brown-red freight train carriages under the crane.", "type": "attribute", "weight": 1 }, { "content": "On the right side of the picture is a wide light green water surface.", "type": "scene", "weight": 2 }, { "content": "In the distance, there are some warehouse buildings in the background.", "type": "scene", "weight": 2 }, { "content": "The roofs of the warehouse area buildings are white.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves down, toward the center of the cargo ship. The crane and bow gradually disappear to the left of the frame, and the stern gradually disappears to the top of the frame.", "visual_elements": [ { "content": "The camera moves downward.", "type": "camera", "weight": 3 }, { "content": "The camera moves toward the center of the ship.", "type": "camera", "weight": 2 }, { "content": "The crane and the bow gradually disappear to the left side of the screen, and the stern gradually disappears to the top of the screen.", "type": "attribute", "weight": 2 } ] }, { "event": "At the end of the video, the camera moves forward slightly. The pipe extending from the left side of the screen is adding fine sand into the open cargo hold. After the fine sand enters the cargo hold, it accumulates in a circular shape. 
On the right side of the screen, sunlight shines on the water, reflecting a golden light.", "visual_elements": [ { "content": "At the end of the video, the camera moves forward slightly.", "type": "camera", "weight": 2 }, { "content": "The pipe extending from the left side of the screen is adding fine sand to the open cargo hold.", "type": "action", "weight": 3 }, { "content": "After the fine sand enters the cargo hold, it accumulates in a circular shape.", "type": "attribute", "weight": 1 }, { "content": "On the right side of the screen, sunlight shines on the water, reflecting a golden glow.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0646", "video_path": "Pexels/3692a5d91a09c2a9fb6aae7d2a88cfabdc653cbed4ae2e2961d5e6b14f56b220.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 10.01, "resolution": { "width": 2160, "height": 3840 }, "caption": "The camera focuses on two Black men sitting next to a metal railing. The man closer to the camera is wearing a black short-sleeve t-shirt and gray sweatpants. He is bald with a white beard on his face. The man to the left of the bald man is wearing a white shirt and has long, dreadlocked hair.\nThe bald man first faces to his left, then turns back to face the camera while speaking. Then, both men shift their gaze from their right to their left. Afterward, the bald man turns his head toward the camera to his right front, then continues to turn right, and finally turns left. In the background, some green trees and brightly colored buildings behind them can be seen.", "events": [ { "event": "The camera focuses on two Black men sitting next to a metal railing. The man closer to the camera is wearing a black short-sleeve t-shirt and gray sweatpants. He is bald with a white beard on his face. The man to the left of the bald man is wearing a white shirt and has long, dreadlocked hair.", "visual_elements": [ { "content": "The camera focuses on two Black men sitting next to a metal railing.", "type": "camera", "weight": 3 }, { "content": "The man closer to the camera is wearing a black short-sleeve T-shirt and gray sweatpants.", "type": "attribute", "weight": 2 }, { "content": "The man closer to the camera is bald.", "type": "attribute", "weight": 2 }, { "content": "The bald man has white facial hair.", "type": "attribute", "weight": 2 }, { "content": "The man to the left of the bald man is wearing a white top.", "type": "attribute", "weight": 2 }, { "content": "The man to the left of the bald man has long dreadlocks.", "type": "attribute", "weight": 2 } ] }, { "event": "The bald man first faces to his left, then turns back to face the camera while speaking. Then, both men shift their gaze from their right to their left. Afterward, the bald man turns his head toward the camera to his right front, then continues to turn right, and finally turns left. 
In the background, some green trees and brightly colored buildings behind them can be seen.", "visual_elements": [ { "content": "The bald man first faces his companion on the left.", "type": "attribute", "weight": 2 }, { "content": "Then, while speaking, the bald man turns back to face forward.", "type": "action", "weight": 2 }, { "content": "Both men's gaze moves from the right side to the left side together.", "type": "action", "weight": 2 }, { "content": "The bald man turns his head to look at the camera to his right front.", "type": "action", "weight": 2 }, { "content": "The bald man continues to turn his head to the right.", "type": "action", "weight": 2 }, { "content": "Finally, the bald man turns his head to the left.", "type": "action", "weight": 2 }, { "content": "In the background, some green trees and brightly colored buildings can be seen.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0647", "video_path": "Pexels/3719fe5ee64212072a0e0f471eb7cbb72288be73f842a5bb7e7318e556836de3.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 19.0, "resolution": { "width": 1080, "height": 1920 }, "caption": "At the beginning of the video, the camera is focused on the handlebars and the black dashboard at the front of a motorcycle. A black helmet is hanging on the left handlebar of the motorcycle, with its strap hanging loosely.\nThe close-up shot of the camera shows the headlight and sidelight of the motorcycle. The headlight of the motorcycle is round, and its chrome-plated shell reflects the surrounding environment. There is also a silver plate of the motorcycle below the headlight, with the word \"MotoLand\" written on the silver square. The sidelight of the motorcycle next to it is yellow.\nThe camera then moves slowly downward first to the motorcycle, which has black forks with rubber dust covers and a front wheel with metal spoke tires that have visible tread on them. The camera then moves down to the left, and the ground in the frame is a gray concrete floor with some tiny stone bumps and stains on the surface.", "events": [ { "event": "At the beginning of the video, the camera is focused on the handlebars and the black dashboard at the front of a motorcycle. A black helmet is hanging on the left handlebar of the motorcycle, with its strap hanging loosely.", "visual_elements": [ { "content": "The camera is focused on the handlebars and the black dashboard at the front of a motorcycle.", "type": "camera", "weight": 3 }, { "content": "A black helmet is hanging on the left handlebar of the motorcycle.", "type": "attribute", "weight": 3 }, { "content": "The strap of the helmet is hanging loosely.", "type": "attribute", "weight": 2 } ] }, { "event": "The close-up shot of the camera shows the headlight and sidelight of the motorcycle. The headlight of the motorcycle is round, and its chrome-plated shell reflects the surrounding environment. There is also a silver plate of the motorcycle below the headlight, with the word \"MotoLand\" written on the silver square. 
The sidelight of the motorcycle next to it is yellow.", "visual_elements": [ { "content": "The close-up shot of the camera shows the headlight and sidelight of the motorcycle.", "type": "camera", "weight": 3 }, { "content": "The headlight of the motorcycle is round.", "type": "attribute", "weight": 3 }, { "content": "The motorcycle's chrome-plated shell reflects the surrounding environment.", "type": "attribute", "weight": 2 }, { "content": "There is also a silver plate of the motorcycle below the headlight.", "type": "attribute", "weight": 2 }, { "content": "The word \"MotoLand\" is written on the silver plate.", "type": "attribute", "weight": 2 }, { "content": "The sidelight of the motorcycle next to it is yellow.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera then moves slowly downward first to the motorcycle, which has black forks with rubber dust covers and a front wheel with metal spoke tires that have visible tread on them. The camera then moves down to the left, and the ground in the frame is a gray concrete floor with some tiny stone bumps and stains on the surface.", "visual_elements": [ { "content": "The camera moves down slowly first.", "type": "camera", "weight": 3 }, { "content": "The motorcycle has black forks.", "type": "attribute", "weight": 3 }, { "content": "The forks have rubber dust covers.", "type": "attribute", "weight": 2 }, { "content": "The front wheel has metal spokes.", "type": "attribute", "weight": 2 }, { "content": "The tires have a distinctive tread pattern.", "type": "attribute", "weight": 2 }, { "content": "The camera then moves down to the left.", "type": "camera", "weight": 3 }, { "content": "The ground in the frame is gray concrete.", "type": "scene", "weight": 2 }, { "content": "There are some small stone bumps and stains on the surface.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0648", "video_path": "Pexels/38201637f10d8e84378c78a2b02d9517210b5cbafc4d556b2ce1105d85bbcc89.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 19.29, "resolution": { "width": 2562, "height": 1440 }, "caption": "The video begins with a high-angle shot of a parking lot entrance. On the left side of the scene, there is a small booth on the ground, with a green trash can and a red traffic cone next to it. There are two white barriers at the entrance of the parking lot. To the left of the entrance, there is a pedestrian crosswalk, with clearly visible white zebra stripes on the ground.\nThe camera slowly tilts upward, moving the parking lot entrance out of the scene. In front of the booth, two people are standing on the road, both dressed in formal attire. Their shadows are cast on the ground in front of them. In the center of the wide stone-paved road, there is a flower bed, with green plants only at both ends. On the dry land to the right of the road, there is a row of green potted plants with small white flowers. Behind the potted plants is a boundary fence, and beyond the fence lies a patch of overgrown wasteland.\nThe camera continues to move upward, revealing a single-story building in the scene. In front of the building, several white delivery trucks are parked, each with the \"HYUNDAI\" logo on them. Beyond the fence, a building with green and yellow colors appears in the wasteland, with numerical identifiers written on it.\nThe camera pans to the left, the full view of the parking lot behind the single-story building appears in the scene. 
The camera then stops moving. The parking lot is filled with various types of vehicles. A row of cars is also parked on the right side of the road next to the parking lot. Tall green trees surround the parking lot and the adjacent wasteland in the distance. In the background, large mountains stretch across the horizon.", "events": [ { "event": "The video begins with a high-angle shot of a parking lot entrance. On the left side of the scene, there is a small booth on the ground, with a green trash can and a red traffic cone next to it. There are two white barriers at the entrance of the parking lot. To the left of the entrance, there is a pedestrian crosswalk, with clearly visible white zebra stripes on the ground.", "visual_elements": [ { "content": "The camera looks down from above at the scene of a parking lot entrance.", "type": "camera", "weight": 3 }, { "content": "On the left side of the scene, there is a small booth on the ground.", "type": "attribute", "weight": 3 }, { "content": "Next to the booth, there is a green trash can and a red traffic cone.", "type": "attribute", "weight": 1 }, { "content": "At the parking lot entrance, there are two white barriers.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the entrance, there is a pedestrian crosswalk.", "type": "attribute", "weight": 2 }, { "content": "There are clear white zebra stripes on the ground.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera slowly tilts upward, moving the parking lot entrance out of the scene. In front of the booth, two people are standing on the road, both dressed in formal attire. Their shadows are cast on the ground in front of them. In the center of the wide stone-paved road, there is a flower bed, with green plants only at both ends. On the dry land to the right of the road, there is a row of green potted plants with small white flowers. Behind the potted plants is a boundary fence, and beyond the fence lies a patch of overgrown wasteland.", "visual_elements": [ { "content": "The camera slowly pans upwards.", "type": "camera", "weight": 3 }, { "content": "The parking lot entrance leaves the scene.", "type": "attribute", "weight": 1 }, { "content": "Two people are standing on the road in front of the booth.", "type": "attribute", "weight": 2 }, { "content": "Both people are wearing relatively formal clothing.", "type": "attribute", "weight": 1 }, { "content": "The shadows of the two people are cast on the ground in front of them.", "type": "attribute", "weight": 2 }, { "content": "In the center of the wide stone path, there is a flower bed.", "type": "attribute", "weight": 2 }, { "content": "The flower bed only has plants at both ends.", "type": "attribute", "weight": 2 }, { "content": "On the dry land on the right side of the road, there is a row of green potted plants with white flowers.", "type": "attribute", "weight": 2 }, { "content": "Behind the potted plants, there is a circle of fencing.", "type": "attribute", "weight": 1 }, { "content": "Outside the fence is a patch of wasteland overgrown with weeds.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move upward, revealing a single-story building in the scene. In front of the building, several white delivery trucks are parked, each with the \"HYUNDAI\" logo on them. 
Beyond the fence, a building with green and yellow colors appears in the wasteland, with numerical identifiers written on it.", "visual_elements": [ { "content": "The camera continues to move upwards.", "type": "camera", "weight": 3 }, { "content": "A single-story building appears in the scene.", "type": "attribute", "weight": 2 }, { "content": "Several white trucks are parked in front of the building.", "type": "attribute", "weight": 2 }, { "content": "The trucks have the \"HYUNDAI\" logo on them.", "type": "attribute", "weight": 1 }, { "content": "In the wasteland outside the fence, a green and yellow building appears.", "type": "attribute", "weight": 2 }, { "content": "The building has a numerical code written on it.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera pans to the left, the full view of the parking lot behind the single-story building appears in the scene. The camera then stops moving. The parking lot is filled with various types of vehicles. A row of cars is also parked on the right side of the road next to the parking lot. Tall green trees surround the parking lot and the adjacent wasteland in the distance. In the background, large mountains stretch across the horizon.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The full view of the parking lot behind the single-story building appears in the scene.", "type": "attribute", "weight": 3 }, { "content": "The camera stops moving.", "type": "camera", "weight": 3 }, { "content": "The parking lot is filled with various types of vehicles.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the road next to the parking lot, there is also a row of cars parked.", "type": "attribute", "weight": 2 }, { "content": "Tall green trees in the distance surround the parking lot and the nearby wasteland.", "type": "scene", "weight": 2 }, { "content": "In the background, large mountains stretch across the horizon.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 29 }, { "index": "TUNA_0649", "video_path": "Pexels/3821b15e1b96e2715e7e18c3711afabf4b8f9410eae2fa9bbe908f0b1263e3ad.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 16.68, "resolution": { "width": 2160, "height": 3840 }, "caption": "At the beginning of the video, the camera focuses on a young white woman with short blond hair. The woman is wearing a black jacket and blue jeans, standing in the center of the screen. The woman holds a white megaphone in her right hand and raises her left fist. Two people stand behind the woman. On the left side of the screen is a black woman. The black woman has two buns, a blue denim jacket, a white inner layer, black pants and shoes. A corner of the person on the right side of the screen is exposed. The person on the right wears an orange top and a black vest on the upper body, and black pants and shoes on the lower body. The two people behind are holding a brown paper sign. There are black and white slogans on the sign. The background is pure white.\nThe camera gradually moves back, distancing itself from the characters. The person on the right side of the screen gradually enters the screen. There is also a black woman on the right side of the screen. The black woman has short curly black hair. The slogans on the signs gradually become clearer. The sign on the left side of the screen reads \"Who runs the world? 
GIRLS!\" The sign on the right side of the screen reads \"The future is FEMALE\". The two people hold signs and occasionally swing their arms up and down.", "events": [ { "event": "At the beginning of the video, the camera focuses on a young white woman with short blond hair. The woman is wearing a black jacket and blue jeans, standing in the center of the screen. The woman holds a white megaphone in her right hand and raises her left fist. Two people stand behind the woman. On the left side of the screen is a black woman. The black woman has two buns, a blue denim jacket, a white inner layer, black pants and shoes. A corner of the person on the right side of the screen is exposed. The person on the right wears an orange top and a black vest on the upper body, and black pants and shoes on the lower body. The two people behind are holding a brown paper sign. There are black and white slogans on the sign. The background is pure white.", "visual_elements": [ { "content": "The camera focuses on a young white woman with short blond hair.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a black jacket and blue jeans.", "type": "attribute", "weight": 3 }, { "content": "The woman stands in the center of the screen.", "type": "action", "weight": 3 }, { "content": "The woman holds a white megaphone in her right hand and holds her left fist high.", "type": "action", "weight": 3 }, { "content": "Two people stand behind the woman.", "type": "attribute", "weight": 2 }, { "content": "There is a black woman on the left side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The black woman has two buns, a blue denim jacket, a white inner layer, black pants and shoes.", "type": "attribute", "weight": 1 }, { "content": "The person on the right side of the screen shows a corner.", "type": "attribute", "weight": 1 }, { "content": "The person on the right is wearing an orange top and a black vest, and black pants and shoes.", "type": "attribute", "weight": 1 }, { "content": "The two people behind are holding a brown paper sign.", "type": "action", "weight": 2 }, { "content": "The sign has black and white slogans.", "type": "attribute", "weight": 1 }, { "content": "The background is pure white.", "type": "scene", "weight": 3 } ] }, { "event": "The camera gradually moves back, distancing itself from the characters. The person on the right side of the screen gradually enters the screen. There is also a black woman on the right side of the screen. The black woman has short curly black hair. The slogans on the signs gradually become clearer. The sign on the left side of the screen reads \"Who runs the world? GIRLS!\" The sign on the right side of the screen reads \"The future is FEMALE\". The two people hold signs and occasionally swing their arms up and down.", "visual_elements": [ { "content": "The camera gradually moves back and distances itself from the characters.", "type": "camera", "weight": 3 }, { "content": "The person on the right side of the screen gradually enters the screen.", "type": "attribute", "weight": 2 }, { "content": "There is also a black woman on the right side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The black woman has short curly black hair.", "type": "attribute", "weight": 1 }, { "content": "The slogan on the sign gradually becomes clear.", "type": "attribute", "weight": 2 }, { "content": "The sign of the woman on the left side of the screen says \"Who runs the world? 
GIRLS!\"", "type": "attribute", "weight": 1 }, { "content": "The sign of the woman on the right side says \"The future is FEMALE\".", "type": "attribute", "weight": 1 }, { "content": "The two people hold signs and swing their arms up and down from time to time.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 20 }, { "index": "TUNA_0650", "video_path": "Pexels/3962239b1db54442ac243bdb45c68c06580ce5408fdec5ac8b20c4963cbc2ac5.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 16.2, "resolution": { "width": 4096, "height": 2160 }, "caption": "The video begins with the camera focusing on a husky dog ​​with its tongue hanging out. The husky's fur is mainly brown and white. The background is a bright living room with a pot of green plants on the left. A man and a woman are sitting on a beige sofa. The man sitting on the right side of the screen is wearing a white shirt and dark blue jeans, and the woman on the left side of the screen is wearing a beige top and light blue jeans. Two laptops are placed on the armrest of the sofa.\nThe camera moves slightly to the left, and the woman stands up and moves to the right of the dog. As the woman strokes the dog, the man stands up from the sofa and squats to the left of the dog. The woman leans her face against the dog, and they begin to stroke the dog together. The green plant in the white pot on the left side of the screen is fully revealed.", "events": [ { "event": "The video begins with the camera focusing on a husky dog ​​with its tongue hanging out. The husky's fur is mainly brown and white. The background is a bright living room with a pot of green plants on the left. A man and a woman are sitting on a beige sofa. The man sitting on the right side of the screen is wearing a white shirt and dark blue jeans, and the woman on the left side of the screen is wearing a beige top and light blue jeans. Two laptops are placed on the armrest of the sofa.", "visual_elements": [ { "content": "The camera is focused on a husky dog ​​with its tongue hanging out.", "type": "camera", "weight": 3 }, { "content": "The husky dog's fur is mainly brown and white.", "type": "attribute", "weight": 3 }, { "content": "The background is a bright living room with a pot of green plants on the left.", "type": "scene", "weight": 3 }, { "content": "A man and a woman are sitting on a beige sofa.", "type": "attribute", "weight": 2 }, { "content": "The man sitting on the right side of the screen is wearing a white shirt and dark blue jeans.", "type": "attribute", "weight": 1 }, { "content": "The woman on the left side of the screen is wearing a beige top and light blue jeans.", "type": "attribute", "weight": 1 }, { "content": "Two laptops are placed on the armrest of the sofa.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves slightly to the left, and the woman stands up and moves to the right of the dog. As the woman strokes the dog, the man stands up from the sofa and squats to the left of the dog. The woman leans her face against the dog, and they begin to stroke the dog together. 
The green plant in the white pot on the left side of the screen is fully revealed.", "visual_elements": [ { "content": "The camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The woman gets up first and goes to the right of the dog.", "type": "action", "weight": 2 }, { "content": "While the woman is petting the dog, the man stands up from the sofa and squats to the left of the dog.", "type": "action", "weight": 2 }, { "content": "The woman puts her face on the dog and they start petting the dog together.", "type": "action", "weight": 2 }, { "content": "The green plant in the white pot on the left side of the screen is fully revealed.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0651", "video_path": "Pexels/679c68ffd4affcac248a17e47bd01c75f4204d150aff86b1f770fcf8bfce2cb6.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 17.28, "resolution": { "width": 3840, "height": 2160 }, "caption": "The video begins with the camera focused on the glass exterior of a modern building. In the background, interior lights and some furniture can be seen inside the exterior.\nThe camera moves steadily to the left. The glass wall first reflects a woman walking to the left of the screen. The woman's hair is bright orange, and she is wearing a long blue-purple coat, light-colored pants and high-heeled boots.\nThen, the woman enters the frame from the right side. The woman holds a coffee cup in her right hand and a light-colored bag on her left shoulder. The woman walks steadily and looks forward. As the camera moves, the glass wall shows a wider area. The yellow sign reflected by the glass wall gradually enters the left side of the frame, surrounded by white slogans. At the end of the video, the woman walks to the left side of the frame, and only the glass wall still reflects the woman's back.", "events": [ { "event": "The video begins with the camera focused on the glass exterior of a modern building. In the background, interior lights and some furniture can be seen inside the exterior.", "visual_elements": [ { "content": "The camera is aimed at the glass exterior of a modern building.", "type": "camera", "weight": 3 }, { "content": "Indoor lights and some furniture can be seen in the exterior wall.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves steadily to the left. The glass wall first reflects a woman walking to the left of the screen. The woman's hair is bright orange, and she is wearing a long blue-purple coat, light-colored pants and high-heeled boots.", "visual_elements": [ { "content": "The camera moves steadily to the left.", "type": "camera", "weight": 3 }, { "content": "The glass wall first reflects a woman walking to the left side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The woman's hair is bright orange, and she is wearing a long blue-purple coat, light-colored pants and high-heeled boots.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the woman enters the frame from the right side. The woman holds a coffee cup in her right hand and a light-colored bag on her left shoulder. The woman walks steadily and looks forward. As the camera moves, the glass wall shows a wider area. The yellow sign reflected by the glass wall gradually enters the left side of the frame, surrounded by white slogans. 
At the end of the video, the woman walks to the left side of the frame, and only the glass wall still reflects the woman's back.", "visual_elements": [ { "content": "Then, the woman enters the frame from the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The woman holds a coffee cup in her right hand and carries a light-colored bag on her left shoulder.", "type": "attribute", "weight": 2 }, { "content": "The woman walks steadily and looks forward.", "type": "action", "weight": 2 }, { "content": "The glass wall in the background shows a wider area.", "type": "scene", "weight": 2 }, { "content": "The yellow sign reflected by the glass wall gradually enters the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "There are white slogans around the sign.", "type": "scene", "weight": 1 }, { "content": "At the end of the video, the woman walks to the left side of the screen, and only the glass wall still reflects the woman's back.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0652", "video_path": "Pexels/68ef334c67f84e5fd0547e2c5f60f1d2c59221d91401d7badfce006684d15b55.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 11.3, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a pair of hands. These hands are placed on a light-colored table, holding a white ceramic bowl. There is a dish in the bowl, covered with orange sauce, with black wood ear mushrooms and a small bunch of green coriander beside it. The background is blurry.\nThe camera continues to slowly zoom out while tilting slightly to the right, but still focused on the dish in the bowl. The owner of the hands is wearing a blue-green plaid shirt with the sleeves rolled up, exposing the forearms.", "events": [ { "event": "At the beginning of the video, the camera is focused on a pair of hands. These hands are placed on a light-colored table, holding a white ceramic bowl. There is a dish in the bowl, covered with orange sauce, with black wood ear mushrooms and a small bunch of green coriander beside it. The background is blurry.", "visual_elements": [ { "content": "The camera is focused on a pair of hands.", "type": "camera", "weight": 3 }, { "content": "These hands are placed on a light-colored table.", "type": "attribute", "weight": 1 }, { "content": "The hands hold a white ceramic bowl.", "type": "attribute", "weight": 2 }, { "content": "There is a dish in the bowl.", "type": "attribute", "weight": 3 }, { "content": "The dish is covered with orange sauce.", "type": "attribute", "weight": 3 }, { "content": "There are black wood ear mushrooms and a small bunch of green coriander beside the sauce.", "type": "attribute", "weight": 3 }, { "content": "The background is blurry.", "type": "scene", "weight": 1 } ] }, { "event": "The camera continues to slowly zoom out while tilting slightly to the right, but still focused on the dish in the bowl. 
The owner of the hands is wearing a blue-green plaid shirt with the sleeves rolled up, exposing the forearms.", "visual_elements": [ { "content": "The camera continues to slowly zoom out.", "type": "camera", "weight": 3 }, { "content": "The camera tilts slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The camera still focuses on the dish in the bowl.", "type": "camera", "weight": 3 }, { "content": "The owner of the hands is wearing a blue-green plaid shirt with the sleeves rolled up.", "type": "attribute", "weight": 2 }, { "content": "The person's forearms are exposed.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0653", "video_path": "Pexels/68f1546d21c3465f6fd652afdeddfc0ff007856173e75fde1b66ad1cef76626f.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 12.46, "resolution": { "width": 1080, "height": 1920 }, "caption": "The video begins with the camera focused on the front of a red motorcycle, and the camera moves to the lower left to show a close-up of the motorcycle's dashboard and headlights. The dashboard has a circular speedometer next to a green button and a silver keyhole. The camera pulls away slightly to show the motorcycle's headlight frame in a rectangular shape with a clear shade.\nThe camera moves slowly to the right and closes in on a silver logo on the handlebars of the motorcycle, which reads “Schwalbe”. The background is a blurry gray.", "events": [ { "event": "The video begins with the camera focused on the front of a red motorcycle, and the camera moves to the lower left to show a close-up of the motorcycle's dashboard and headlights. The dashboard has a circular speedometer next to a green button and a silver keyhole. The camera pulls away slightly to show the motorcycle's headlight frame in a rectangular shape with a clear shade.", "visual_elements": [ { "content": "The camera focuses on the front of a red motorcycle.", "type": "camera", "weight": 3 }, { "content": "The camera moves to the lower left.", "type": "camera", "weight": 3 }, { "content": "The camera close-up shows the dashboard and headlight of the motorcycle.", "type": "camera", "weight": 3 }, { "content": "The dashboard has a round speedometer.", "type": "attribute", "weight": 2 }, { "content": "There is also a green button and a silver keyhole next to the dashboard.", "type": "attribute", "weight": 2 }, { "content": "The camera zooms out slightly.", "type": "camera", "weight": 3 }, { "content": "The headlight frame of the motorcycle is rectangular in shape.", "type": "attribute", "weight": 3 }, { "content": "The lampshade is transparent.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera moves slowly to the right and closes in on a silver logo on the handlebars of the motorcycle, which reads “Schwalbe”. 
The background is a blurry gray.", "visual_elements": [ { "content": "The camera moves slowly to the right and zooms in.", "type": "camera", "weight": 3 }, { "content": "The motorcycle has a silver logo on the handlebars.", "type": "attribute", "weight": 2 }, { "content": "The logo reads “Schwalbe”.", "type": "attribute", "weight": 2 }, { "content": "The background is a blurry gray.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0654", "video_path": "Pexels/6981bc9b293a6df96671853ed4615c9304ebde9ef59c9cf85b47e3fdb08f0809.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 9.71, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focused on a grand building, with the front occupying the center of the scene. On either side of the building, there are two tall towers. In the center of the building, there is a large round window, surrounded by intricate carvings. The main entrance of the building has three large black doors, each with an arched decoration above. A long staircase leads up to the building, with black railings along the sides. In the lower right corner of the scene, a group of people is active on the platform to the right of the building. The sky is covered with thick clouds.\nThe camera slowly pans to the lower left. On the left side of the building, there is a patch of green grass. Behind the grass, there is a house, surrounded by a few bare trees. On the left side of the scene, to the left of the staircase, there is a patch of shrubs. Behind the shrubs, there is a blue sign. In the background, sunlight filters through the clouds and shines on the house in the distance.", "events": [ { "event": "The video begins with the camera focused on a grand building, with the front occupying the center of the scene. On either side of the building, there are two tall towers. In the center of the building, there is a large round window, surrounded by intricate carvings. The main entrance of the building has three large black doors, each with an arched decoration above. A long staircase leads up to the building, with black railings along the sides. In the lower right corner of the scene, a group of people is active on the platform to the right of the building. The sky is covered with thick clouds.", "visual_elements": [ { "content": "The camera is focused on a grand building.", "type": "camera", "weight": 3 }, { "content": "The front of the building occupies the center of the scene.", "type": "attribute", "weight": 2 }, { "content": "On either side of the building, there are two tall towers.", "type": "attribute", "weight": 2 }, { "content": "In the center of the building, there is a large round window.", "type": "attribute", "weight": 2 }, { "content": "The main entrance of the building has three large black doors.", "type": "attribute", "weight": 2 }, { "content": "Above each door, there is an arched decoration.", "type": "attribute", "weight": 1 }, { "content": "A long staircase leads up to the building.", "type": "attribute", "weight": 2 }, { "content": "The staircase has black railings along its sides.", "type": "attribute", "weight": 2 }, { "content": "In the lower right corner of the scene, a group of people is active on the platform to the right of the building.", "type": "attribute", "weight": 1 }, { "content": "The sky is covered with thick clouds.", "type": "scene", "weight": 2 } ] }, { "event": "The camera slowly pans to the lower left. 
On the left side of the building, there is a patch of green grass. Behind the grass, there is a house, surrounded by a few bare trees. On the left side of the scene, to the left of the staircase, there is a patch of shrubs. Behind the shrubs, there is a blue sign. In the background, sunlight filters through the clouds and shines on the house in the distance.", "visual_elements": [ { "content": "The camera slowly pans to the lower left.", "type": "camera", "weight": 3 }, { "content": "On the left side of the building, there is a patch of green grass.", "type": "attribute", "weight": 1 }, { "content": "Behind the grass, there is a house.", "type": "attribute", "weight": 2 }, { "content": "The house is surrounded by a few bare trees.", "type": "attribute", "weight": 1 }, { "content": "On the left side of the scene, to the left of the staircase, there is a patch of shrubs.", "type": "attribute", "weight": 2 }, { "content": "Behind the shrubs, there is a blue sign.", "type": "attribute", "weight": 1 }, { "content": "In the background, sunlight filters through the clouds and shines on the house in the distance.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0655", "video_path": "Pexels/6a359342e886669385a69bcaffb278708dcf097fb6f132d23bbc35556cfe4bfc.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 18.35, "resolution": { "width": 3840, "height": 2160 }, "caption": "The video begins with a young white man and woman sitting on the grass outdoors with a dog. The woman is on the left, the man is on the right, and the dog is in the middle. Both are wearing blue denim jackets, jeans, and white shoes. The woman has long blond hair, while the man has short black hair and a beard. The dog has lush brown hair that is slightly curly. The man and woman sometimes stroke the dog's head and sometimes pull up its ears. The background is an open meadow with some withered grass and scattered trees with bare branches. The white sky in the distance is bright.\nThen, the camera moves forward. The man puts his right hand on the woman's waist and strokes the dog with his left hand. The woman smiles and strokes the dog with both hands. The dog opens its mouth and shows its tongue.\nThen, the camera pauses for a few seconds, then continues to move forward. The camera focuses on the man and woman. The dog gradually disappears to the bottom of the screen, with only a corner of its head showing.\nFinally, the camera moves down and the dog appears again. The man and the woman smile at the camera, and the dog also looks up with its tongue tucked in. The man's left hand sometimes strokes the dog's neck, and sometimes on the dog's head. The woman happily leans slowly on the man's shoulder.", "events": [ { "event": "The video begins with a young white man and woman sitting on the grass outdoors with a dog. The woman is on the left, the man is on the right, and the dog is in the middle. Both are wearing blue denim jackets, jeans, and white shoes. The woman has long blond hair, while the man has short black hair and a beard. The dog has lush brown hair that is slightly curly. The man and woman sometimes stroke the dog's head and sometimes pull up its ears. The background is an open meadow with some withered grass and scattered trees with bare branches. 
The white sky in the distance is bright.", "visual_elements": [ { "content": "The camera is focused on a young white man and woman sitting on the grass outdoors with a dog.", "type": "camera", "weight": 3 }, { "content": "The woman is on the left side of the picture, the man is on the right side of the picture, and the dog is in the middle of the two.", "type": "attribute", "weight": 3 }, { "content": "Both the man and the woman are wearing blue denim jackets, jeans, and a pair of white shoes.", "type": "attribute", "weight": 2 }, { "content": "The woman has long blond hair, while the man has short black hair and a beard.", "type": "attribute", "weight": 2 }, { "content": "The dog has lush brown hair that is slightly curly.", "type": "attribute", "weight": 2 }, { "content": "The man and the woman sometimes stroke the dog's head and sometimes pull up the dog's ears.", "type": "action", "weight": 3 }, { "content": "The background is an open meadow.", "type": "scene", "weight": 3 }, { "content": "There are some withered grass and scattered trees on the meadow, and the branches of the trees are bare.", "type": "scene", "weight": 2 }, { "content": "The white sky in the distance is bright.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves forward. The man puts his right hand on the woman's waist and strokes the dog with his left hand. The woman smiles and strokes the dog with both hands. The dog opens its mouth and shows its tongue.", "visual_elements": [ { "content": "Then, the camera moves forward.", "type": "camera", "weight": 3 }, { "content": "The man puts his right hand on the woman's waist and strokes the dog with his left hand.", "type": "action", "weight": 2 }, { "content": "The woman strokes the dog with both hands and smiles.", "type": "action", "weight": 2 }, { "content": "The dog opens its mouth and shows its tongue.", "type": "action", "weight": 1 } ] }, { "event": "Then, the camera pauses for a few seconds, then continues to move forward. The camera focuses on the man and woman. The dog gradually disappears to the bottom of the screen, with only a corner of its head showing.", "visual_elements": [ { "content": "Then, the camera pauses for a few seconds and then moves forward.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the man and woman.", "type": "camera", "weight": 3 }, { "content": "The dog gradually disappears to the bottom of the screen, with only a corner of its head showing.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the camera moves down and the dog appears again. The man and the woman smile at the camera, and the dog also looks up with its tongue tucked in. The man's left hand sometimes strokes the dog's neck, and sometimes on the dog's head. 
The woman happily leans slowly on the man's shoulder.", "visual_elements": [ { "content": "Finally, the camera moves downward.", "type": "camera", "weight": 3 }, { "content": "The dog reappears in the picture.", "type": "attribute", "weight": 2 }, { "content": "The man and woman smile at the camera, and the dog also tucks its tongue back in and looks up.", "type": "action", "weight": 3 }, { "content": "The man's left hand sometimes strokes the dog's neck and sometimes rests on the dog's head.", "type": "action", "weight": 2 }, { "content": "The woman happily leans slowly on the man's shoulder.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0656", "video_path": "Pexels/6bd316fa70c4209765535117001ae97601e3da4fb384a6d51716690df9b38548.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 18.52, "resolution": { "width": 3840, "height": 2160 }, "caption": "The video begins with the camera focusing on the back of a man sitting on a high stool. He is wearing a black T-shirt, aqua blue jeans, a white baseball cap, a purple bracelet on his right hand, and is playing an acoustic guitar. There is a microphone stand in front of the man, and he is singing into the microphone. In the background is a red brick wall with several decorative paintings and neon billboards with words such as \"BUD LIGHT\" and \"ABITA\" hanging on it. The largest painting on the background wall shows a man in a white suit holding a guitar. Next to the painting is a TV hanging on the wall.\nAt the end of the video, the camera starts to move to the lower left, gradually leaving the man's back and turning to the ground. As the camera moves, a speaker appears on the left side of the screen. The camera finally stops on a red and gray wall.", "events": [ { "event": "The video begins with the camera focusing on the back of a man sitting on a high stool. He is wearing a black T-shirt, aqua blue jeans, a white baseball cap, a purple bracelet on his right hand, and is playing an acoustic guitar. There is a microphone stand in front of the man, and he is singing into the microphone. In the background is a red brick wall with several decorative paintings and neon billboards with words such as \"BUD LIGHT\" and \"ABITA\" hanging on it. The largest painting on the background wall shows a man in a white suit holding a guitar. 
Next to the painting is a TV hanging on the wall.", "visual_elements": [ { "content": "At the beginning of the video, the camera is focused on the back of a man sitting on a high stool.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a black T-shirt, aqua blue jeans, a white baseball cap, and a purple bracelet on his right hand.", "type": "attribute", "weight": 3 }, { "content": "The man is playing an acoustic guitar and singing into the microphone stand in front of him.", "type": "action", "weight": 3 }, { "content": "The background is a red brick wall with several decorative paintings and neon billboards on it.", "type": "scene", "weight": 3 }, { "content": "The billboards have words such as \"BUD LIGHT\" and \"ABITA\".", "type": "scene", "weight": 2 }, { "content": "The largest painting on the background wall shows a man wearing a white suit and holding a guitar.", "type": "scene", "weight": 1 }, { "content": "Next to the painting is a TV hanging on the wall.", "type": "scene", "weight": 1 } ] }, { "event": "At the end of the video, the camera starts to move to the lower left, gradually leaving the man's back and turning to the ground. As the camera moves, a speaker appears on the left side of the screen. The camera finally stops on a red and gray wall.", "visual_elements": [ { "content": "At the end of the video, the camera starts to move to the lower left.", "type": "camera", "weight": 3 }, { "content": "The camera gradually moves away from the man's back and turns to the ground.", "type": "camera", "weight": 2 }, { "content": "As the camera moves, a speaker appears on the left side of the screen.", "type": "attribute", "weight": 1 }, { "content": "The camera finally stops on a red and gray wall.", "type": "camera", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0657", "video_path": "Pexels/6c196cbdb8034ce21b74d5ff68de806bfc13f4ec05c04f1466bb0edad85424c5.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 17.2, "resolution": { "width": 3840, "height": 2160 }, "caption": "The video begins with the camera focusing on a barren patch of ground covered in debris and dirt. The background is a gradient sky that gradually transitions from a dark blue at the horizon to a light pink color and then to a light blue above.\nThe camera shakes slightly as a lighted black Hyundai slowly approaches from the distance, its headlights glowing in pale yellow, its license plate number “0990AO797”, and the Hyundai logo on the front of the car gradually appearing.\nAs the car continues to move, the camera moves slightly to the right to show the side profile of the car. The front wheels and the side of the car gradually come into view.", "events": [ { "event": "The video begins with the camera focusing on a barren patch of ground covered in debris and dirt. 
The background is a gradient sky that gradually transitions from a dark blue at the horizon to a light pink color and then to a light blue above.", "visual_elements": [ { "content": "The camera focuses on a barren patch of ground.", "type": "camera", "weight": 3 }, { "content": "The ground is covered with debris and dirt.", "type": "scene", "weight": 1 }, { "content": "The background is a gradient sky.", "type": "scene", "weight": 2 }, { "content": "The sky gradually transitions from a dark blue at the horizon to a light pink, and then to a light blue above it.", "type": "scene", "weight": 2 } ] }, { "event": "The camera shakes slightly as a lighted black Hyundai slowly approaches from the distance, its headlights glowing in pale yellow, its license plate number “0990AO797”, and the Hyundai logo on the front of the car gradually appearing.", "visual_elements": [ { "content": "The camera shakes slightly.", "type": "camera", "weight": 3 }, { "content": "A black Hyundai with its lights on is slowly approaching from the distance.", "type": "scene", "weight": 3 }, { "content": "The headlights of the car are glowing pale yellow.", "type": "attribute", "weight": 3 }, { "content": "The license plate number is “0990AO797”.", "type": "attribute", "weight": 1 }, { "content": "The Hyundai logo on the front of the car gradually appears.", "type": "attribute", "weight": 2 } ] }, { "event": "As the car continues to move, the camera moves slightly to the right to show the side profile of the car. The front wheels and the side of the car gradually come into view.", "visual_elements": [ { "content": "The car continues to move.", "type": "attribute", "weight": 2 }, { "content": "The camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The side profile of the car is shown.", "type": "attribute", "weight": 2 }, { "content": "The front wheels and the side of the car are coming into view.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0658", "video_path": "Pexels/99fd7c8b120b58b1b13460f1a6552b7195a5bb6598b275eaba170fd95101488a.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 14.06, "resolution": { "width": 2160, "height": 3840 }, "caption": "The video begins with the camera focused on a city street, with a tall classical building in the background, with many windows on the exterior wall. There is a decorative spire structure in front of the building, surrounded by a black fence. There are traffic lights and road signs on the street. There are several people walking on the street.\nThen, the camera moves slightly to the right. A young black man enters from the right side of the screen. The man is wearing a gray suit, a dark shirt, black pants, and black leather shoes. The man is holding a mobile phone in his right hand and talking on it.\nThen, the camera moves to the left following the black man. The man walks to a yellow taxi and reaches out to open the back door. The back seat of the taxi is black. There is a billboard on the top of the taxi, which reads \"GIRL FROM THE NORTH COUNTRY ON BROADWAY\" and indicates the name of the creator and music author. The man holds a mobile phone in his right hand, opens the door with his left hand, and bends down to get in the car. There is a lush tree on the street opposite the man in the background. There are tall buildings in the distance.\nFinally, the camera starts to move forward, and as the man enters the taxi, the camera speeds up. 
At the end of the video, the man has sat in the back seat and closed the door.", "events": [ { "event": "The video begins with the camera focused on a city street, with a tall classical building in the background, with many windows on the exterior wall. There is a decorative spire structure in front of the building, surrounded by a black fence. There are traffic lights and road signs on the street. There are several people walking on the street.", "visual_elements": [ { "content": "The camera is pointed at a city street.", "type": "camera", "weight": 3 }, { "content": "The background is a tall classical building.", "type": "scene", "weight": 3 }, { "content": "There are many windows on the exterior wall of the building.", "type": "scene", "weight": 2 }, { "content": "There is a decorative spire in front of the building.", "type": "scene", "weight": 2 }, { "content": "There is a black fence around the spire.", "type": "scene", "weight": 1 }, { "content": "There are traffic lights and road signs on the street.", "type": "scene", "weight": 1 }, { "content": "Several people are walking on the street.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves slightly to the right. A young black man enters from the right side of the screen. The man is wearing a gray suit, a dark shirt, black pants, and black leather shoes. The man is holding a mobile phone in his right hand and talking on it.", "visual_elements": [ { "content": "Then, the camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "A young black man enters from the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The man is wearing a gray suit, a dark shirt, black pants, and black leather shoes.", "type": "attribute", "weight": 2 }, { "content": "The man is holding a mobile phone in his right hand and talking.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera moves to the left following the black man. The man walks to a yellow taxi and reaches out to open the back door. The back seat of the taxi is black. There is a billboard on the top of the taxi, which reads \"GIRL FROM THE NORTH COUNTRY ON BROADWAY\" and indicates the name of the creator and music author. The man holds a mobile phone in his right hand, opens the door with his left hand, and bends down to get in the car. There is a lush tree on the street opposite the man in the background. There are tall buildings in the distance.", "visual_elements": [ { "content": "Then, the camera moves left with the black man.", "type": "camera", "weight": 3 }, { "content": "The man walks to a yellow taxi and opens the back door.", "type": "action", "weight": 3 }, { "content": "The back seat of the taxi is black.", "type": "attribute", "weight": 1 }, { "content": "There is a billboard on the top of the taxi that reads \"GIRL FROM THE NORTH COUNTRY ON BROADWAY\" and indicates the name of the creator and the composer of the music.", "type": "attribute", "weight": 2 }, { "content": "The man holds the mobile phone in his right hand, opens the door with his left hand, and bends down to get in the car.", "type": "action", "weight": 2 }, { "content": "There is a lush tree on the street opposite the man in the background.", "type": "scene", "weight": 2 }, { "content": "There are tall buildings in the distance.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera starts to move forward, and as the man enters the taxi, the camera speeds up. 
At the end of the video, the man has sat in the back seat and closed the door.", "visual_elements": [ { "content": "Finally, the camera starts to move forward.", "type": "camera", "weight": 3 }, { "content": "As the man enters the taxi, the camera moves forward faster.", "type": "camera", "weight": 3 }, { "content": "At the end of the video, the man sits in the back seat and closes the door.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0659", "video_path": "Pexels/9ac4c50e8e5bada217831e2903050cd99c5df5b3603df57c2f72cacfea496166.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 15.64, "resolution": { "width": 3840, "height": 2160 }, "caption": "The video begins with the camera looking down from a high altitude on a vast landscape of farmland. Large green and brown fields are in the scene, with the fields showing a regular circular and rectangular distribution. In the distance are rolling hills. The camera moves to the left, and there are some small buildings and roads in the middle of the fields, with some trees and bushes around the buildings.\nThe camera continues to move to the left. There is some irrigation equipment in the field, shaped like a long straight line. There are some sprinkler heads on the equipment, and irrigation is being carried out. The movement of the irrigation equipment leaves a clear trail on the field.\nThe camera continues to move to the left and eventually settles on a plowed field.", "events": [ { "event": "The video begins with the camera looking down from a high altitude on a vast landscape of farmland. Large green and brown fields are in the scene, with the fields showing a regular circular and rectangular distribution. In the distance are rolling hills. 
The camera moves to the left, and there are some small buildings and roads in the middle of the fields, with some trees and bushes around the buildings.", "visual_elements": [ { "content": "The camera looks down from a high altitude on a vast landscape of farmland.", "type": "camera", "weight": 3 }, { "content": "Large green and brown fields are in the picture.", "type": "scene", "weight": 3 }, { "content": "The ground shows a regular circular and rectangular distribution.", "type": "attribute", "weight": 2 }, { "content": "In the distance are rolling hills.", "type": "scene", "weight": 2 }, { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "There are some small buildings and roads in the middle of the fields.", "type": "scene", "weight": 2 }, { "content": "There are some trees and bushes around the buildings.", "type": "scene", "weight": 1 } ] }, { "event": "The camera continues to move to the left. There is some irrigation equipment in the field, shaped like a long straight line. There are some sprinkler heads on the equipment, and irrigation is being carried out. The movement of the irrigation equipment leaves a clear trail on the field.", "visual_elements": [ { "content": "The camera continues to move to the left.", "type": "camera", "weight": 3 }, { "content": "There is some irrigation equipment in the field, shaped like a long straight line.", "type": "scene", "weight": 3 }, { "content": "There are some spray nozzles on the equipment.", "type": "attribute", "weight": 2 }, { "content": "The irrigation equipment is carrying out irrigation.", "type": "attribute", "weight": 1 }, { "content": "The trajectory of the irrigation equipment leaves visible marks on the field.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera continues to move to the left and eventually settles on a plowed field.", "visual_elements": [ { "content": "The camera continues to move to the left.", "type": "camera", "weight": 3 }, { "content": "The camera eventually settles on a plowed field.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0660", "video_path": "Pexels/9adc1041d96cc386b2d96d2f2c3c058c3812be9db22c665ed9d7955f1f8e6613.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 11.9, "resolution": { "width": 1080, "height": 1920 }, "caption": "At the beginning of the video, the camera is focused on a young black woman in front of the door of a black car. 
The woman is wearing a black business suit and black flat shoes. The woman holds a red folder and a mobile phone in her left hand and a pen in her right hand. The woman uses her right hand to put the mobile phone in her left hand on the red folder in her left arm. Then, the woman lowers her head and writes something on the document with a pen. The car door is open, and the car has a beige interior. The sky is blue, there are several tall palm trees in the background, and a tree trunk in the foreground is wrapped with green lights. There is a modern white building in the distance.", "visual_elements": [ { "content": "The camera is focused on a young black woman in front of the door of a black car.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a black business suit and black flat shoes.", "type": "attribute", "weight": 3 }, { "content": "The woman holds a red folder and a mobile phone in her left hand and a pen in her right hand.", "type": "attribute", "weight": 3 }, { "content": "The woman puts the mobile phone in her left hand on the folder with her right hand.", "type": "action", "weight": 2 }, { "content": "Then, the woman lowers her head and writes something on the document with a pen.", "type": "action", "weight": 3 }, { "content": "The car door is open, and the car has a beige interior.", "type": "attribute", "weight": 1 }, { "content": "The sky is blue.", "type": "scene", "weight": 2 }, { "content": "There are several tall palm trees in the background.", "type": "scene", "weight": 2 }, { "content": "A tree trunk in the foreground is wrapped with green lights.", "type": "scene", "weight": 1 }, { "content": "There is a modern white building in the distance.", "type": "scene", "weight": 3 } ] }, { "event": "The camera moves slightly to the right, and the left side of the tree with green lights in the foreground gradually disappears. The woman is concentrating on marking the documents and occasionally adjusting her hair.", "visual_elements": [ { "content": "The camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The left side of the tree wrapped with green lights in the foreground gradually disappears.", "type": "attribute", "weight": 2 }, { "content": "The woman focuses on marking the document and occasionally adjusts her hair.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0661", "video_path": "Pexels/9b7ea843cfdaefa09230c38d2c5bbf91e0187506c3a5f8dc1f460386583e7637.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 11.8, "resolution": { "width": 3840, "height": 2160 }, "caption": "The video begins with the camera pointing at a store window with two women inside. The woman on the left is wearing a white mask, while the woman on the right is wearing a blue mask. The woman on the left is wearing a white dress with pink patterns. The woman on the right is wearing a green dress with a white coat with black plaids, a cup of coffee in her left hand and a woven bag on her left arm. They are walking and talking. The background is a store with various goods on display. There are some blue boxes on the left side of the screen. There are several orange electronic devices on display near the door on the right side of the screen. There are several notices posted at the store entrance.\nThen, the camera moves to the left. The two people walk out of the store and continue talking on the sidewalk outside the door. 
The woman in the green skirt almost completely blocks the camera, gesturing with her hands while talking. There are other pedestrians and store signs in the background. There are green plants and street lights on the left side of the picture.\nFinally, the camera moves slightly to the left and then fixes at an angle. The woman in the floral dress pulls the woman in the plaid dress to her left with her left hand. At the end of the video, the camera captures the backs of the two people walking side by side on the sidewalk. In the background, there is a woman in a red short-sleeved shirt walking from the opposite direction.", "events": [ { "event": "The video begins with the camera pointing at a store window with two women inside. The woman on the left is wearing a white mask, while the woman on the right is wearing a blue mask. The woman on the left is wearing a white dress with pink patterns. The woman on the right is wearing a green dress with a white coat with black plaids, a cup of coffee in her left hand and a woven bag on her left arm. They are walking and talking. The background is a store with various goods on display. There are some blue boxes on the left side of the screen. There are several orange electronic devices on display near the door on the right side of the screen. There are several notices posted at the store entrance.", "visual_elements": [ { "content": "The camera is pointed at a shop window.", "type": "camera", "weight": 3 }, { "content": "There are two women in the window.", "type": "attribute", "weight": 3 }, { "content": "The woman on the left side of the screen is wearing a white mask, and the woman on the right side of the screen is wearing a blue mask.", "type": "attribute", "weight": 3 }, { "content": "The woman on the left side of the screen is wearing a white dress with pink patterns.", "type": "attribute", "weight": 2 }, { "content": "The woman on the right side of the screen is wearing a green dress with a white coat with black plaid on the outside.", "type": "attribute", "weight": 2 }, { "content": "The woman on the right side of the screen is holding a cup of coffee in her left hand and a woven bag on her left arm.", "type": "attribute", "weight": 1 }, { "content": "The two are talking while walking to the left side of the screen near the window.", "type": "action", "weight": 2 }, { "content": "The background is a store with various goods on display.", "type": "scene", "weight": 3 }, { "content": "There are some blue boxes on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "There are several orange electronic devices near the door on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "There are several notices posted at the door of the store.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves to the left. The two people walk out of the store and continue talking on the sidewalk outside the door. The woman in the green skirt almost completely blocks the camera, gesturing with her hands while talking. There are other pedestrians and store signs in the background. 
There are green plants and street lights on the left side of the picture.", "visual_elements": [ { "content": "Then, the camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The two people walk out of the store while talking and continue to talk on the sidewalk at the door.", "type": "action", "weight": 3 }, { "content": "The woman in the green dress almost completely blocks the camera, gesturing with her hands while talking.", "type": "action", "weight": 2 }, { "content": "There are other pedestrians and store signs in the background.", "type": "scene", "weight": 2 }, { "content": "There are green plants and street lights on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera moves slightly to the left and then fixes at an angle. The woman in the floral dress pulls the woman in the plaid dress to her left with her left hand. At the end of the video, the camera captures the backs of the two people walking side by side on the sidewalk. In the background, there is a woman in a red short-sleeved shirt walking from the opposite direction.", "visual_elements": [ { "content": "Finally, the camera moves slightly to the left and then fixes at an angle.", "type": "camera", "weight": 3 }, { "content": "The woman in the floral dress pulls the woman in the plaid dress to her left with her left hand.", "type": "action", "weight": 3 }, { "content": "At the end of the video, the camera captures the backs of the two people walking side by side on the sidewalk.", "type": "attribute", "weight": 2 }, { "content": "In the background, there is a woman in a red short-sleeved shirt walking from the opposite direction.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0662", "video_path": "Pexels/9c34965ed99844c80c8c230051e2b5e32fefa1e3096a6966994f90a0e22af01c.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 15.85, "resolution": { "width": 3840, "height": 2160 }, "caption": "The video begins with the camera looking down at the coastline from a high altitude, where the waves continue to lap against the beach, creating white waves. The camera moves to the right to show more of the shoreline and the dynamics of the waves, with some buildings and equipment along the shoreline.", "events": [ { "event": "The video begins with the camera looking down at the coastline from a high altitude, where the waves continue to lap against the beach, creating white waves. 
The camera moves to the right to show more of the shoreline and the dynamics of the waves, with some buildings and equipment along the shoreline.", "visual_elements": [ { "content": "The camera looks down at the coastline from a high altitude.", "type": "camera", "weight": 3 }, { "content": "The waves continue to lap against the beach, creating white waves.", "type": "scene", "weight": 2 }, { "content": "The camera moves to the right to show more of the shoreline and the dynamics of the waves.", "type": "camera", "weight": 3 }, { "content": "There are some buildings and equipment along the shoreline.", "type": "scene", "weight": 2 } ] } ], "n_events": 1, "n_elements": 4 }, { "index": "TUNA_0663", "video_path": "Pexels/9f19319190071dcde61a8ea01d657da6e1d279e398290435ef48e11866391fbb.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 12.96, "resolution": { "width": 3840, "height": 2160 }, "caption": "At the beginning of the video, the camera is focused on a woman standing inside a modern building. The woman's hair is black and naturally curly. The woman is wearing a peach suit jacket with a beige turtleneck sweater and holding a white laptop in her hands. The background is a spacious and bright interior space with multiple floors. There are three transparent elevators side by side in the background. The camera maintains a medium shot.\nThe camera pans left around the woman. From the left side of the woman to her right side, the woman keeps smiling at the camera. As the camera moves, the transparent elevator in the background goes up and down. When the camera turns to the right, the elevator disappears on the left side of the screen, and a corridor with several floors of stairs appears on the right side of the screen.", "events": [ { "event": "At the beginning of the video, the camera is focused on a woman standing inside a modern building. The woman's hair is black and naturally curly. The woman is wearing a peach suit jacket with a beige turtleneck sweater and holding a white laptop in her hands. The background is a spacious and bright interior space with multiple floors. There are three transparent elevators side by side in the background. The camera maintains a medium shot.", "visual_elements": [ { "content": "The camera is focused on a woman standing inside a modern building.", "type": "camera", "weight": 3 }, { "content": "The woman has black hair that is naturally curly.", "type": "attribute", "weight": 2 }, { "content": "The woman is wearing a peach suit jacket with a beige turtleneck sweater underneath.", "type": "attribute", "weight": 2 }, { "content": "The woman is holding a white laptop.", "type": "attribute", "weight": 2 }, { "content": "The background is a spacious and bright interior space with multiple floors.", "type": "scene", "weight": 3 }, { "content": "There are three transparent elevators side by side in the background.", "type": "scene", "weight": 2 }, { "content": "The camera maintains a medium shot.", "type": "camera", "weight": 3 } ] }, { "event": "The camera pans left around the woman. From the left side of the woman to her right side, the woman keeps smiling at the camera. As the camera moves, the transparent elevator in the background goes up and down. 
When the camera turns to the right, the elevator disappears on the left side of the screen, and a corridor with several floors of stairs appears on the right side of the screen.", "visual_elements": [ { "content": "The camera pans left around the woman.", "type": "camera", "weight": 3 }, { "content": "The camera moves from the left side of the woman to the right side of the woman.", "type": "camera", "weight": 2 }, { "content": "The woman keeps looking at the camera with a smile.", "type": "action", "weight": 3 }, { "content": "As the camera moves, the transparent elevator in the background goes up and down.", "type": "scene", "weight": 2 }, { "content": "When the camera moves to the right, the elevator disappears on the left side of the screen.", "type": "scene", "weight": 1 }, { "content": "A corridor with several floors of stairs appears on the right side of the screen.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0664", "video_path": "Pexels/a04e347643cdffef111e4e9750506d08edf1ee76f4e9d9e41757bdfde850450e.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 16.32, "resolution": { "width": 3840, "height": 2160 }, "caption": "At the start of the video, the camera looks down from a high vantage point at an urban landscape. In the center of the scene is a tall glass curtain wall building, with its surface reflecting the surrounding scenery. Surrounding the building are high-rise buildings, all with glass curtain walls.\nAs the camera pans downward, the full view of the glass building in the center of the scene gradually becomes visible, with a reverse triangular base at the bottom. To the left of the building is a river, and to the right is a small garden. On the right side of the garden is a relatively shorter red brick building. The snow on the garden area has not melted yet, with winding traces left by pedestrians.\nThe camera continues panning downward. In front of the central building, a bridge spans across the river. In front of the bridge, a small park area appears along the riverbank, with a thin layer of snow covering the ground. The park features a uniquely shaped red sculpture.", "events": [ { "event": "At the start of the video, the camera looks down from a high vantage point at an urban landscape. In the center of the scene is a tall glass curtain wall building, with its surface reflecting the surrounding scenery. Surrounding the building are high-rise buildings, all with glass curtain walls.", "visual_elements": [ { "content": "The camera looks down from a high vantage point at an urban landscape.", "type": "camera", "weight": 3 }, { "content": "In the center of the scene is a tall glass curtain wall building.", "type": "attribute", "weight": 3 }, { "content": "Surrounding the building are high-rise buildings.", "type": "attribute", "weight": 2 }, { "content": "All of these buildings have glass curtain walls.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera pans downward, the full view of the glass building in the center of the scene gradually becomes visible, with a reverse triangular base at the bottom. To the left of the building is a river, and to the right is a small garden. On the right side of the garden is a relatively shorter red brick building. 
The snow on the garden area has not melted yet, with winding traces left by pedestrians.", "visual_elements": [ { "content": "The camera pans downward.", "type": "camera", "weight": 3 }, { "content": "The full view of the glass building in the center of the scene gradually becomes visible.", "type": "attribute", "weight": 1 }, { "content": "The bottom has a reverse triangular base.", "type": "attribute", "weight": 1 }, { "content": "To the left of the building is a river.", "type": "attribute", "weight": 2 }, { "content": "To the right is a small garden.", "type": "attribute", "weight": 1 }, { "content": "The snow on the garden area has not melted yet.", "type": "attribute", "weight": 2 }, { "content": "Winding traces are left by pedestrians walking through the snow.", "type": "attribute", "weight": 2 }, { "content": "To the right of the garden is a relatively shorter red brick building.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues panning downward. In front of the central building, a bridge spans across the river. In front of the bridge, a small park area appears along the riverbank, with a thin layer of snow covering the ground. The park features a uniquely shaped red sculpture.", "visual_elements": [ { "content": "The camera continues panning downward.", "type": "camera", "weight": 3 }, { "content": "In front of the central building, a bridge spans across the river.", "type": "attribute", "weight": 2 }, { "content": "In front of the bridge, a small park area appears along the riverbank.", "type": "attribute", "weight": 2 }, { "content": "The ground is covered with a thin layer of snow.", "type": "attribute", "weight": 2 }, { "content": "The park features a uniquely shaped red sculpture.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0665", "video_path": "Pexels/ccf609ff0f0e0a1ff45a8a6ac86e60096ad0b3775a8fd41dced1355f805eb61e.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 15.02, "resolution": { "width": 2160, "height": 3840 }, "caption": "The video begins with the camera focused on a tree-lined sidewalk. The backs of two women walking along the sidewalk appear in the center of the screen. The person on the left has short fluffy hair, a light grey long-sleeved top and black pants. The person on the right has long hair, a yellow long-sleeved suit and a red hat. Both of them are carrying black backpacks and wearing white shoes. In the background, there is a row of vehicles on the left side of the path, and a red brick building on the right side, with neatly trimmed bushes next to the building. In the picture, there are trees on both sides of the path; the tree on the left is taller and the tree on the right is shorter. The sun shines through the leaves and falls on the ground, creating mottled light and shadows.\nThe camera follows the two people moving forward. The person on the right side of the screen gestures as she walks, and finally jumps up and holds the arm of the person on the left, resting her head on the shoulder of the person on the left. In the background, a conspicuous red car on the left side of the road gradually enters the screen.", "events": [ { "event": "The video begins with the camera focused on a tree-lined sidewalk. The backs of two women walking along the sidewalk appear in the center of the screen. The person on the left has short fluffy hair, a light grey long-sleeved top and black pants. 
The person on the right has long hair, a yellow long-sleeved suit and a red hat. Both of them are carrying black backpacks and wearing white shoes. In the background, there is a row of vehicles on the left side of the path, and a red brick building on the right side, with neatly trimmed bushes next to the building. In the picture, there are trees on both sides of the path; the tree on the left is taller and the tree on the right is shorter. The sun shines through the leaves and falls on the ground, creating mottled light and shadows.", "visual_elements": [ { "content": "The camera is pointed at a tree-lined sidewalk.", "type": "camera", "weight": 3 }, { "content": "The backs of two women walking along the sidewalk appear in the center of the screen.", "type": "attribute", "weight": 3 }, { "content": "The person on the left side of the screen has fluffy short hair, light gray long sleeves and black pants.", "type": "attribute", "weight": 2 }, { "content": "The person on the right side of the screen has long hair, a yellow long-sleeved suit and a red hat.", "type": "attribute", "weight": 2 }, { "content": "Both of them are carrying black backpacks and wearing white shoes.", "type": "attribute", "weight": 1 }, { "content": "In the background, there is a row of vehicles on the left side of the path and a red brick building on the right side.", "type": "scene", "weight": 2 }, { "content": "There are neatly trimmed bushes next to the building.", "type": "scene", "weight": 1 }, { "content": "In the picture, there are trees on both sides of the path.", "type": "scene", "weight": 2 }, { "content": "In the picture, the trees on the left side of the path are taller, and the trees on the right side are shorter.", "type": "scene", "weight": 1 }, { "content": "The sun shines through the leaves on the ground, forming mottled light and shadow.", "type": "scene", "weight": 1 } ] }, { "event": "The camera follows the two people moving forward. The person on the right side of the screen gestures as she walks, and finally jumps up and holds the arm of the person on the left, resting her head on the shoulder of the person on the left. In the background, a conspicuous red car on the left side of the road gradually enters the screen.", "visual_elements": [ { "content": "The camera follows the two people moving forward.", "type": "camera", "weight": 3 }, { "content": "The person on the right side of the screen gestures while walking, and finally jumps, holds the arm of the person on the left, and rests her head on the shoulder of the person on the left.", "type": "action", "weight": 2 }, { "content": "In the background, a conspicuous red car on the left side of the road gradually enters the picture.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0666", "video_path": "Pexels/cd54f523afd1dec415b2c4f53cbf7eca5e49e6c54430c920e1f64ceefb1a2fa3.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 11.88, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera looking down from a high altitude on a vast grassland with a few scattered trees and scattered cattle. In the distance of the scene are mountains, which are shrouded in mist.\nThe camera moves slowly to the right to show a wider area of the grassland, with the ground barren and dry. More cattle and trees can be seen, their branches swaying gently in the breeze. 
A path appears in the frame, which winds its way across the grassland into the distance.", "events": [ { "event": "The video begins with the camera looking down from a high altitude on a vast grassland with a few scattered trees and scattered cattle. In the distance of the scene are mountains, which are shrouded in mist.", "visual_elements": [ { "content": "The camera looks down on a vast grassland from a high altitude.", "type": "camera", "weight": 3 }, { "content": "The grassland has a few scattered trees and cattle.", "type": "scene", "weight": 2 }, { "content": "The mountains are in the distance.", "type": "scene", "weight": 2 }, { "content": "The mountains are shrouded in mist.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves slowly to the right to show a wider area of the grassland, with the ground barren and dry. More cattle and trees can be seen, their branches swaying gently in the breeze. A path appears in the frame, which winds its way across the grassland into the distance.", "visual_elements": [ { "content": "The camera moves slowly to the right.", "type": "camera", "weight": 3 }, { "content": "The camera shows a wider area of the grassland.", "type": "scene", "weight": 2 }, { "content": "The ground is barren and cracked.", "type": "attribute", "weight": 2 }, { "content": "More cattle and trees can be seen.", "type": "attribute", "weight": 2 }, { "content": "The branches of the trees sway gently in the breeze.", "type": "scene", "weight": 1 }, { "content": "A path appears in the frame.", "type": "scene", "weight": 2 }, { "content": "The path winds through the grassland into the distance.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0667", "video_path": "Pexels/cd9be5c4192c2f65f6aff43959d9de4d2f953430478b47ccec141942120bce52.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 17.02, "resolution": { "width": 3840, "height": 2160 }, "caption": "The camera looks down from a high altitude at a residential area. In the scene, most of the houses are two to three stories tall, with red roofs. The residential area is not very dense, and there are spaces between the houses. Some of these spaces have been used to plant crops. A straight railway track runs through the residential area, with a train running on the track.\nThe camera pans forward, and the coastline appears on the left side of the scene, with sparkling sea and mountains stretching endlessly in the distance. On the right side of the scene, large greenhouses appear, and in the background, houses are densely scattered on the hillside.", "events": [ { "event": "The camera looks down from a high altitude at a residential area. In the scene, most of the houses are two to three stories tall, with red roofs. The residential area is not very dense, and there are spaces between the houses. Some of these spaces have been used to plant crops. 
A straight railway track runs through the residential area, with a train running on the track.", "visual_elements": [ { "content": "The camera looks down from a high altitude at a residential area.", "type": "camera", "weight": 3 }, { "content": "Most of the houses are two to three stories tall.", "type": "attribute", "weight": 2 }, { "content": "The roofs are mostly red.", "type": "attribute", "weight": 2 }, { "content": "There are spaces between the houses.", "type": "attribute", "weight": 2 }, { "content": "Some of these spaces have been used to plant crops.", "type": "attribute", "weight": 1 }, { "content": "A straight railway track runs through the residential area.", "type": "attribute", "weight": 2 }, { "content": "A train is running on the track.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans forward, and the coastline appears on the left side of the scene, with sparkling sea and mountains stretching endlessly in the distance. On the right side of the scene, large greenhouses appear, and in the background, houses are densely scattered on the hillside.", "visual_elements": [ { "content": "The camera pans forward.", "type": "camera", "weight": 3 }, { "content": "The coastline appears on the left side of the scene.", "type": "attribute", "weight": 2 }, { "content": "The sea sparkles.", "type": "attribute", "weight": 1 }, { "content": "In the distance, the mountains stretch endlessly.", "type": "attribute", "weight": 1 }, { "content": "A large area of greenhouses appears on the right side of the scene.", "type": "attribute", "weight": 2 }, { "content": "In the background, houses are densely scattered on the hillside.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0668", "video_path": "Pexels/cda87daac0b965f06e69418477fc0f25d53c5f591a74539d0a1e8fefabf678b1.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 15.08, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with a close-up of the rear of a black Mercedes-Benz S550 sedan, with the taillights and emblems clearly visible. The vehicle is parked sideways in an empty parking lot, with some buildings and fences visible in the background.\nAs the camera slowly pulls back and away, the license plate number of the black Mercedes can be seen as JWO-65V.\nThe camera then continues to slowly move backward and upward as the rear of the black vehicle reflects the rays of the sun, and the vehicle's left exhaust pipe discharges some exhaust fumes.", "events": [ { "event": "The video begins with a close-up of the rear of a black Mercedes-Benz S550 sedan, with the taillights and emblems clearly visible. 
The vehicle is parked sideways in an empty parking lot, with some buildings and fences visible in the background.", "visual_elements": [ { "content": "The camera gives a close-up of the rear of a black Mercedes-Benz S550 sedan.", "type": "camera", "weight": 3 }, { "content": "The taillights and emblems are clearly visible.", "type": "attribute", "weight": 2 }, { "content": "The vehicle is parked sideways in an empty parking lot", "type": "attribute", "weight": 2 }, { "content": "Some buildings and fences can be seen in the background", "type": "scene", "weight": 2 } ] }, { "event": "As the camera slowly pulls back and away, the license plate number of the black Mercedes can be seen as JWO-65V.", "visual_elements": [ { "content": "The camera slowly pulls back.", "type": "camera", "weight": 3 }, { "content": "The license plate number of the black Mercedes can be seen as JWO-65V.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera then continues to slowly move backward and upward as the rear of the black vehicle reflects the rays of the sun, and the vehicle's left exhaust pipe discharges some exhaust fumes.", "visual_elements": [ { "content": "The camera continues to move slowly backward and upward.", "type": "camera", "weight": 3 }, { "content": "The rear of the black vehicle reflects the rays of the sun.", "type": "attribute", "weight": 2 }, { "content": "The left exhaust pipe of the vehicle is emitting some exhaust fumes.", "type": "attribute", "weight": 3 } ] } ], "n_events": 3, "n_elements": 9 }, { "index": "TUNA_0669", "video_path": "Pexels/ced7c7292cb99db8a9667fdd21795aab1312497bc7dde263ccb8ede767a09967.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 12.47, "resolution": { "width": 3840, "height": 2160 }, "caption": "At the beginning of the video, the camera is focused on a young black man. The man has short black curly hair and wears a beige coat with a white inner layer. The collar of the coat is slightly open. The man has a green backpack on his back. The man has a white mobile phone in his left hand. The man looks down at the phone and smiles. The background is blurred, and there is a simple modern building. The sky is blue with a few white clouds dotted in between. On the right side of the picture, there is a blue pedestrian sign.\nThe camera keeps shaking left and right to maintain a close-up shot.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young black man. The man has short black curly hair and wears a beige coat with a white inner layer. The collar of the coat is slightly open. The man has a green backpack on his back. The man has a white mobile phone in his left hand. The man looks down at the phone and smiles. The background is blurred, and there is a simple modern building. The sky is blue with a few white clouds dotted in between. 
On the right side of the picture, there is a blue pedestrian sign.", "visual_elements": [ { "content": "The camera is focused on a young black man.", "type": "camera", "weight": 3 }, { "content": "The man has short black curly hair.", "type": "attribute", "weight": 2 }, { "content": "The man is wearing a beige coat with a white inner layer, and the collar of the coat is slightly open.", "type": "attribute", "weight": 2 }, { "content": "The man is carrying a green backpack on his back.", "type": "scene", "weight": 1 }, { "content": "The man has a white mobile phone in his left hand.", "type": "attribute", "weight": 2 }, { "content": "The man is looking down at his phone with a smile.", "type": "action", "weight": 3 }, { "content": "There is a simple modern building in the background.", "type": "scene", "weight": 2 }, { "content": "The sky is blue with a few white clouds dotted in between.", "type": "scene", "weight": 1 }, { "content": "On the right side of the picture, there is a blue pedestrian sign.", "type": "scene", "weight": 2 } ] }, { "event": "The camera keeps shaking left and right to maintain a close-up shot.", "visual_elements": [ { "content": "The camera keeps shaking left and right.", "type": "camera", "weight": 3 }, { "content": "The camera keeps shooting close-up.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0670", "video_path": "Pexels/d070d8e8657e4642eeb2cb118b8cc80592de7f7430cf4037ae13e53f95c297e0.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 14.17, "resolution": { "width": 4096, "height": 2160 }, "caption": "At the beginning of the video, the camera focuses on the entrance of a building. Above the entrance, there is a large sign with the words \"FINFINE ADARASH\" written in two languages. In the top left corner of the scene, part of a palm tree trunk is visible, with palm leaves extending outward. Behind the sign, the road leads to a building, where a crowd has gathered at the entrance. On both sides of the road, there are green trees.\nThe camera pans up while moving forward. Only the wooden roof of the entrance and the sign above it remain in the scene. The camera reaches beneath the roof, where the beam structure of the roof is visible in the scene. The weather is clear, and clouds are scattered in the light blue sky.", "events": [ { "event": "At the beginning of the video, the camera focuses on the entrance of a building. Above the entrance, there is a large sign with the words \"FINFINE ADARASH\" written in two languages. In the top left corner of the scene, part of a palm tree trunk is visible, with palm leaves extending outward. Behind the sign, the road leads to a building, where a crowd has gathered at the entrance. 
On both sides of the road, there are green trees.", "visual_elements": [ { "content": "The camera focuses on the entrance of a building.", "type": "camera", "weight": 3 }, { "content": "Above the entrance, there is a large sign.", "type": "attribute", "weight": 3 }, { "content": "The sign has the words \"FINFINE ADARASH\" written in two languages.", "type": "attribute", "weight": 2 }, { "content": "In the top left corner of the scene, part of a palm tree trunk is visible.", "type": "attribute", "weight": 1 }, { "content": "The palm leaves extend outward.", "type": "attribute", "weight": 2 }, { "content": "Behind the entrance, the road leads to a building.", "type": "attribute", "weight": 1 }, { "content": "A crowd has gathered at the entrance of the building.", "type": "attribute", "weight": 2 }, { "content": "On both sides of the road, there are green trees.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans up while moving forward. Only the wooden roof of the entrance and the sign above it remain in the scene. The camera reaches beneath the roof, where the beam structure of the roof is visible in the scene. The weather is clear, and clouds are scattered in the light blue sky.", "visual_elements": [ { "content": "The camera pans up while moving forward", "type": "camera", "weight": 3 }, { "content": "Only the wooden roof of the entrance and the sign above it remain in the scene.", "type": "attribute", "weight": 3 }, { "content": "The internal beam structure of the roof is visible in the scene.", "type": "attribute", "weight": 2 }, { "content": "The weather is clear.", "type": "scene", "weight": 2 }, { "content": "Clouds are scattered in the light blue sky.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0671", "video_path": "Pexels/d0ce8b71604cb84544c390b1adedf07e2eaa6909cdee364ae8ea7fed9ebf27df.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 17.12, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with a pregnant white woman and a brown dachshund. The dachshund has an orange collar around its neck. The woman lies on her side on a gray yoga mat on a yellow floor, with her head resting on two blue yoga bricks. The woman has brown hair and a large belly. The woman wears a gray sports bra and gray leggings. The woman's right hand is raised, holding a small yellow snack. The dachshund has its head raised high and is biting the snack in the woman's hand. There is a yellow wooden floor and some furniture, including tables and chairs, in the background. The tables and chairs are mainly red and reddish brown.\nThe camera then moves up and down with the dog's movements. The woman continues to tease the dachshund, shaking the snack in front of it, and the dachshund keeps adjusting its position to bite the snack. The woman's arm is sometimes raised and sometimes lowered, and the dachshund moves up and down. After the dog eats the snack, it lowers its head to chew.\nFinally, the camera moves forward and the dog's body gradually disappears to the left of the frame. The dog finishes the snack and raises its head to bite the snack in the woman's hand. The woman's hand strokes the dog's head as the dog lowers its head to chew.", "events": [ { "event": "The video begins with a pregnant white woman and a brown dachshund. The dachshund has an orange collar around its neck. 
The woman lies on her side on a gray yoga mat on a yellow floor, with her head resting on two blue yoga bricks. The woman has brown hair and a large belly. The woman wears a gray sports bra and gray leggings. The woman's right hand is raised, holding a small yellow snack. The dachshund has its head raised high and is biting the snack in the woman's hand. There is a yellow wooden floor and some furniture, including tables and chairs, in the background. The tables and chairs are mainly red and reddish brown.", "visual_elements": [ { "content": "The camera is focused on a pregnant white woman and a brown dachshund.", "type": "camera", "weight": 3 }, { "content": "The dachshund has an orange collar around its neck.", "type": "attribute", "weight": 1 }, { "content": "The woman is lying on her side on a gray yoga mat on a yellow floor.", "type": "attribute", "weight": 3 }, { "content": "The woman's head is resting on two blue yoga bricks.", "type": "attribute", "weight": 2 }, { "content": "The woman has brown hair and a large belly.", "type": "attribute", "weight": 2 }, { "content": "The woman is wearing a gray sports bra and gray leggings.", "type": "attribute", "weight": 2 }, { "content": "The woman's right hand is raised high, holding a small yellow snack.", "type": "action", "weight": 3 }, { "content": "The dachshund has its head raised high and is biting the snack in the woman's hand.", "type": "action", "weight": 2 }, { "content": "There is a yellow wooden floor and some furniture, including tables and chairs, in the background.", "type": "scene", "weight": 3 }, { "content": "The tables and chairs are mainly red and reddish brown.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then moves up and down with the dog's movements. The woman continues to tease the dachshund, shaking the snack in front of it, and the dachshund keeps adjusting its position to bite the snack. The woman's arm is sometimes raised and sometimes lowered, and the dachshund moves up and down. After the dog eats the snack, it lowers its head to chew.", "visual_elements": [ { "content": "The camera then moves up and down with the dog's movements.", "type": "camera", "weight": 3 }, { "content": "The woman continues to tease the dachshund, shaking the snack in front of it.", "type": "action", "weight": 3 }, { "content": "The dachshund keeps adjusting its posture to bite the snack.", "type": "action", "weight": 2 }, { "content": "The woman's arm is sometimes raised and sometimes lowered, and the dachshund moves up and down accordingly.", "type": "action", "weight": 3 }, { "content": "The dog lowers its head to chew after eating the snack.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera moves forward and the dog's body gradually disappears to the left of the frame. The dog finishes the snack and raises its head to bite the snack in the woman's hand. 
The woman's hand strokes the dog's head as the dog lowers its head to chew.", "visual_elements": [ { "content": "Finally, the camera moves forward.", "type": "camera", "weight": 3 }, { "content": "The dog's body gradually disappears to the left of the screen.", "type": "attribute", "weight": 2 }, { "content": "After the dog finishes eating the snack, it raises its head and bites the snack in the woman's hand.", "type": "action", "weight": 2 }, { "content": "When the dog lowers its head to chew, the woman strokes the dog's head.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0672", "video_path": "Pexels/d1121ff1566b3b99cd0afaf12dfe2eceab514367ac31610998c000d320a0b8d8.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 25.0, "resolution": { "width": 3840, "height": 2160 }, "caption": "At the beginning of the video, the camera is focused on some apricots scattered on the wooden floor. The apricots are orange-yellow in color, and one of them has been bitten, revealing the pit. Some brown hair can be seen scattered on the floor in the background.\nThen, the camera moves slightly to the left and then to the right, and a fair-skinned hand appears in the frame, with the fingers gently rolling an apricot.\nAs the camera moves further forward, the focus of the camera shifts to the face of a young woman. The woman is dressed in green and wearing round earrings, which are silver. She also has a string of white bead chains around her neck, decorated with several small green flowers. Then, the camera follows the woman, shaking from side to side.", "events": [ { "event": "At the beginning of the video, the camera is focused on some apricots scattered on the wooden floor. The apricots are orange-yellow in color, and one of them has been bitten, revealing the pit. Some brown hair can be seen scattered on the floor in the background.", "visual_elements": [ { "content": "The camera is focused on some apricots scattered on the wooden floor.", "type": "camera", "weight": 3 }, { "content": "The apricots are orange-yellow in color.", "type": "attribute", "weight": 3 }, { "content": "One of the apricots has been bitten, revealing the pit.", "type": "attribute", "weight": 3 }, { "content": "Some brown hair can be seen scattered on the floor in the background.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera moves slightly to the left and then to the right, and a fair-skinned hand appears in the frame, with the fingers gently rolling an apricot.", "visual_elements": [ { "content": "The camera moves slightly to the left and then to the right.", "type": "camera", "weight": 3 }, { "content": "A fair-skinned hand appears in the frame.", "type": "attribute", "weight": 2 }, { "content": "The fingers gently roll an apricot.", "type": "action", "weight": 3 } ] }, { "event": "As the camera moves further forward, the focus of the camera shifts to the face of a young woman. The woman is dressed in green and wearing round earrings, which are silver. She also has a string of white bead chains around her neck, decorated with several small green flowers. 
Then, the camera follows the woman, shaking from side to side.", "visual_elements": [ { "content": "The camera moves further forward.", "type": "camera", "weight": 3 }, { "content": "The focus of the camera shifts to the face of a young woman.", "type": "camera", "weight": 3 }, { "content": "The woman is dressed in green and wearing round earrings.", "type": "attribute", "weight": 2 }, { "content": "The earrings are silver.", "type": "attribute", "weight": 1 }, { "content": "The woman also has a string of white bead chains around her neck.", "type": "attribute", "weight": 3 }, { "content": "The bead chains are decorated with several small green flowers.", "type": "attribute", "weight": 2 }, { "content": "The camera follows the woman, shaking from side to side.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0673", "video_path": "Pexels/d19f10f779aaf1986f9b9673ec508721b01929b0e594dcee38e5d33a8d58e456.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 15.04, "resolution": { "width": 4096, "height": 2160 }, "caption": "At the beginning of the video, the camera is focused on two young policemen standing in front of the fence of a house. Both policemen are wearing black police uniforms with police equipment hanging around their waists. The white policeman on the left side of the picture holds a flashlight in his right hand, and the black policeman on the right side of the picture has his left hand behind his back. There is a black bag on the ground, and a silver toolbox next to it. There is an outline of a body drawn on the ground with white chalk. In the background, there is light shining on the fence and the middle pillar. Behind the fence is part of a house with a light-colored exterior wall. There is a yellow cordon on the fence, and the words on the cordon are clearly visible.\nThroughout the video, the camera keeps shaking. The white police officer on the left side of the screen turns on his flashlight, and the light of the flashlight moves left and right on the ground.", "events": [ { "event": "At the beginning of the video, the camera is focused on two young policemen standing in front of the fence of a house. Both policemen are wearing black police uniforms with police equipment hanging around their waists. The white policeman on the left side of the picture holds a flashlight in his right hand, and the black policeman on the right side of the picture has his left hand behind his back. There is a black bag on the ground, and a silver toolbox next to it. There is an outline of a body drawn on the ground with white chalk. In the background, there is light shining on the fence and the middle pillar. Behind the fence is part of a house with a light-colored exterior wall. 
There is a yellow cordon on the fence, and the words on the cordon are clearly visible.", "visual_elements": [ { "content": "The camera is focused on two young policemen standing in front of a house fence.", "type": "camera", "weight": 3 }, { "content": "Both policemen are wearing black police uniforms with police equipment hanging around their waists.", "type": "attribute", "weight": 3 }, { "content": "The white policeman on the left side of the screen holds a flashlight in his right hand.", "type": "attribute", "weight": 2 }, { "content": "The black policeman on the right side of the screen has his left hand behind his back.", "type": "attribute", "weight": 2 }, { "content": "There is a black bag on the ground, and a silver toolbox next to it.", "type": "attribute", "weight": 2 }, { "content": "There is a body outline drawn on the ground with white chalk.", "type": "attribute", "weight": 1 }, { "content": "In the background, there is light shining on the fence and the middle pillar.", "type": "scene", "weight": 2 }, { "content": "Behind the fence is part of the house, and the outer wall of the house is light-colored.", "type": "scene", "weight": 1 }, { "content": "There is a yellow cordon on the fence, and the text on the cordon is clearly visible.", "type": "scene", "weight": 1 } ] }, { "event": "Throughout the video, the camera keeps shaking. The white police officer on the left side of the screen turns on his flashlight, and the light of the flashlight moves left and right on the ground.", "visual_elements": [ { "content": "Throughout the video, the camera keeps shaking.", "type": "camera", "weight": 3 }, { "content": "The white policeman on the left side of the screen turns on the flashlight.", "type": "action", "weight": 3 }, { "content": "The flashlight moves left and right on the ground.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0674", "video_path": "Pexels/d391a8864ff27d39fc07317766bce41b347047e0f1573b7218a1071703d48392.mp4", "video_source": "Pexels", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 10.05, "resolution": { "width": 3840, "height": 2160 }, "caption": "At the beginning of the video, the camera is focused on a young black woman lying on her side in bed and a dog next to her. The woman is wearing a gray vest and black and white striped pants. The woman's left hand supports her head against the dark green quilt, and her right hand gently strokes a light-colored dog. The dog wears a red collar and has its mouth open. The bed sheet is off-white. On the left side of the screen is part of the room, with a dark brown wooden floor, a black chair and a small dark brown round table on the floor. There is a pot of green plants on the small round table, and the pot is made of brown and gray.\nThe camera gradually moves forward. The woman strokes the dog's head with her right hand while talking. The dog keeps looking at the woman.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young black woman lying on her side in bed and a dog next to her. The woman is wearing a gray vest and black and white striped pants. The woman's left hand supports her head against the dark green quilt, and her right hand gently strokes a light-colored dog. The dog wears a red collar and has its mouth open. The bed sheet is off-white. On the left side of the screen is part of the room, with a dark brown wooden floor, a black chair and a small dark brown round table on the floor. 
There is a pot of green plants on the small round table, and the pot is made of brown and gray.", "visual_elements": [ { "content": "The camera is focused on a young black woman lying on her side on a bed and a dog next to her.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a gray vest and black and white striped pants.", "type": "attribute", "weight": 2 }, { "content": "The woman's left hand supports her head against the dark green quilt, and her right hand gently strokes a light-colored dog.", "type": "action", "weight": 3 }, { "content": "The dog is wearing a red collar and has its mouth open.", "type": "action", "weight": 2 }, { "content": "The bed sheet is off-white.", "type": "scene", "weight": 2 }, { "content": "The floor is dark brown wood.", "type": "scene", "weight": 2 }, { "content": "There is a black chair and a small dark brown round table on the floor.", "type": "scene", "weight": 2 }, { "content": "There is a pot of green plants on the small round table.", "type": "scene", "weight": 2 }, { "content": "The pot is brown and gray.", "type": "scene", "weight": 1 } ] }, { "event": "The camera gradually moves forward. The woman strokes the dog's head with her right hand while talking. The dog keeps looking at the woman.", "visual_elements": [ { "content": "The camera gradually moves forward.", "type": "camera", "weight": 3 }, { "content": "The woman strokes the dog's head with her right hand while talking.", "type": "action", "weight": 3 }, { "content": "The dog keeps looking at the woman.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0675", "video_path": "Pixabay/0c71d41eb83a19471dd6832cc3b798ddb4f88e75ecf1c52da273eb3b74231f02.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 20.65, "resolution": { "width": 3840, "height": 2160 }, "caption": "The video begins with the camera looking down from a high altitude on a winding road lined with trees covered in thick snow, with a black car moving in the center of the road.\nThe camera stays at an overhead angle throughout the shot, focusing on a man in a dark jacket and gray pants and following the movement of the car, with the trees lining the road in neat rows.\nThe camera moves with the curve of the road as the car travels along the winding road, showing the forest in its entirety.\nAt the end of the video, the camera stays high in the air, looking down on the entire snow-covered forest.", "events": [ { "event": "The video begins with the camera looking down from a high altitude on a winding road lined with trees covered in thick snow, with a black car moving in the center of the road.", "visual_elements": [ { "content": "The camera looks down from a high altitude on a winding road.", "type": "camera", "weight": 3 }, { "content": "The road is lined with trees covered in thick snow.", "type": "scene", "weight": 3 }, { "content": "A black car is moving in the center of the road.", "type": "scene", "weight": 3 } ] }, { "event": "The camera stays at an overhead angle throughout the shot, focusing on a man in a dark jacket and gray pants and following the movement of the car, with the trees lining the road in neat rows.", "visual_elements": [ { "content": "The camera stays at an overhead angle focusing on a man wearing a dark jacket and gray pants.", "type": "camera", "weight": 3 }, { "content": "The trees on both sides of the road are well aligned.", "type": "scene", "weight": 2 } ] }, { "event": "The 
camera moves with the curve of the road as the car travels along the winding road, showing the entire forest in its entirety.", "visual_elements": [ { "content": "The camera moves with the curve of the road.", "type": "camera", "weight": 3 }, { "content": "The car travels along the winding road.", "type": "action", "weight": 2 }, { "content": "The camera shows the entire forest in its entirety.", "type": "scene", "weight": 2 } ] }, { "event": "At the end of the video, the camera stays high in the air, looking down on the entire snow-covered forest.", "visual_elements": [ { "content": "The camera stays high in the air, looking down on the entire snow-covered forest.", "type": "camera", "weight": 3 } ] } ], "n_events": 4, "n_elements": 9 }, { "index": "TUNA_0676", "video_path": "Pixabay/0eacdb8dd2bd8da7b98ab4156ab67753034c291b1e66e6267984f9f6d3029514.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 15.62, "resolution": { "width": 3840, "height": 2160 }, "caption": "The video begins with the camera zooming out from a high altitude, capturing a suspension bridge spanning across the river. It is late in the evening, and the outlines of distant mountains are faintly visible. Several street lamps are installed along both sides of the bridge, emitting an orange-yellow glow that lights up the bridge deck.\nThe camera slowly pans forward, and cars are driving on both lanes of the bridge deck. The vehicles have their headlights on. At the top of the bridge towers of the cross-river bridge, red aviation obstruction lights are installed, flickering with faint red light. The water surface is divided into two parts. The water surface at the bottom of the scene reflects the lights on the bridge, sparkling. The water surface at the top of the scene is dark. The buildings at the foot of the distant mountains emit a faint glow.", "events": [ { "event": "The video begins with the camera zooming out from a high altitude, capturing a suspension bridge spanning across the river. It is late in the evening, and the outlines of distant mountains are faintly visible. Several street lamps are installed along both sides of the bridge, emitting an orange-yellow glow that lights up the bridge deck.", "visual_elements": [ { "content": "The camera zooms out from a high altitude, capturing a suspension bridge spanning across the river.", "type": "camera", "weight": 3 }, { "content": "It is late in the evening.", "type": "scene", "weight": 2 }, { "content": "The outlines of distant mountains are faintly visible.", "type": "scene", "weight": 2 }, { "content": "Several street lamps are installed along both sides of the bridge.", "type": "attribute", "weight": 2 }, { "content": "The street lamps emit an orange-yellow glow.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera slowly pans forward, and cars are driving on both lanes of the bridge deck. The vehicles have their headlights on. At the top of the bridge towers of the cross-river bridge, red aviation obstruction lights are installed, flickering with faint red light. The water surface is divided into two parts. The water surface at the bottom of the scene reflects the lights on the bridge, sparkling. The water surface at the top of the scene is dark. 
The buildings at the foot of the distant mountains emit a faint glow.", "visual_elements": [ { "content": "The camera slowly pans forward.", "type": "camera", "weight": 3 }, { "content": "Cars are driving on both lanes of the bridge deck.", "type": "action", "weight": 2 }, { "content": "The vehicles have their headlights on.", "type": "attribute", "weight": 2 }, { "content": "At the top of the bridge towers of the cross-river bridge, red aviation obstruction lights are installed.", "type": "attribute", "weight": 1 }, { "content": "The aviation obstruction lights flicker with faint red light.", "type": "attribute", "weight": 1 }, { "content": "The water surface at the bottom of the scene reflects the lights on the bridge", "type": "attribute", "weight": 1 }, { "content": "The water surface at the bottom of the scene is shimmering", "type": "attribute", "weight": 1 }, { "content": "The water surface at the top of the scene is dark", "type": "attribute", "weight": 1 }, { "content": "The buildings at the foot of the distant mountains emit a faint glow.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0677", "video_path": "Pixabay/297e1affbddd09318cefe5c7d004b11e019173f0b82f927b4fb009c51ddfe7ba.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 18.0, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a row of neatly arranged shopping carts, with red signs on the handles showing instructions for use. There is a horizontal bar in front of the shopping carts, with ropes wrapped around the bar to tie the shopping carts. There are some customers in the background, who are moving behind the shopping carts. Most of the customers in the picture are wearing heavy coats.\nThe camera moves slightly to the right and back, getting some distance from the shopping cart. A rope on the far left of the picture gradually disappears. Customers in the background change quickly, some enter and some leave. Customers shuttle in the supermarket, some are selecting products, while others are talking.", "events": [ { "event": "The video begins with the camera focused on a row of neatly arranged shopping carts, with red signs on the handles showing instructions for use. There is a horizontal bar in front of the shopping carts, with ropes wrapped around the bar to tie the shopping carts. There are some customers in the background, who are moving behind the shopping carts. Most of the customers in the picture are wearing heavy coats.", "visual_elements": [ { "content": "The camera is pointed at a row of neatly arranged shopping carts.", "type": "camera", "weight": 3 }, { "content": "There is a red logo on the handle of the shopping cart, showing the instructions for use.", "type": "attribute", "weight": 2 }, { "content": "There is a horizontal bar in front of the shopping cart.", "type": "attribute", "weight": 2 }, { "content": "There is a rope wrapped around the horizontal bar to tie the shopping cart.", "type": "attribute", "weight": 1 }, { "content": "There are some customers in the background.", "type": "scene", "weight": 3 }, { "content": "Customers are moving behind the shopping carts.", "type": "scene", "weight": 2 }, { "content": "Most of the customers are wearing heavy coats.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves slightly to the right and back, getting some distance from the shopping cart. 
A rope on the far left of the picture gradually disappears. Customers in the background change quickly, some enter and some leave. Customers shuttle in the supermarket, some are selecting products, while others are talking.", "visual_elements": [ { "content": "The camera moves slightly to the right and back, keeping a distance from the shopping carts.", "type": "camera", "weight": 3 }, { "content": "A rope on the far left of the picture gradually disappears.", "type": "attribute", "weight": 1 }, { "content": "Customers in the background change rapidly, some enter and some leave.", "type": "scene", "weight": 1 }, { "content": "Customers are moving around in the supermarket, some are selecting goods, and others are talking.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0678", "video_path": "Pixabay/300954ebb0894de1717b0990746d3c29093e3ad80b95eebb57bc4967a7467017.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 14.68, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on an open green meadow, where a group of cows are grazing. The cows are mainly brown in color, with black faces and legs. The cows are large in size and have curved horns on their heads. The background is a dense forest.\nThen, the camera shakes slightly, blocking the cows on the left side of the frame. The herd is scattered on the grass, with three cows in the front of the frame. Some are eating grass, while others are walking around.\nFinally, a cow at the back of the group walks to the front of the picture. The four cows in front of the picture move forward while eating grass, blocking the herd behind them. Occasionally, a cow will raise its head with grass in its mouth.", "events": [ { "event": "At the beginning of the video, the camera is focused on an open green meadow, where a group of cows are grazing. The cows are mainly brown in color, with black faces and legs. The cows are large in size and have curved horns on their heads. The background is a dense forest.", "visual_elements": [ { "content": "The camera is pointed at an open green meadow.", "type": "camera", "weight": 3 }, { "content": "In the picture, a group of cows are eating grass with their heads down.", "type": "action", "weight": 3 }, { "content": "The cows are mainly brown, with black faces and legs.", "type": "attribute", "weight": 3 }, { "content": "The cows are large, with curved horns on their heads.", "type": "attribute", "weight": 2 }, { "content": "The background is a dense forest.", "type": "scene", "weight": 3 } ] }, { "event": "Then, the camera shakes slightly, blocking the cows on the left side of the frame. The herd is scattered on the grass, with three cows in the front of the frame. Some are eating grass, while others are walking around.", "visual_elements": [ { "content": "Then, the camera shakes slightly.", "type": "camera", "weight": 3 }, { "content": "The cows on the left side of the picture are blocked.", "type": "attribute", "weight": 1 }, { "content": "The herd of cows is scattered on the grass, with three cows in the front of the picture.", "type": "attribute", "weight": 3 }, { "content": "Some cows are eating grass, while others are walking around.", "type": "action", "weight": 2 } ] }, { "event": "Finally, a cow at the back of the group walks to the front of the picture. 
The four cows in front of the picture move forward while eating grass, blocking the herd behind them. Occasionally, a cow will raise its head with grass in its mouth.", "visual_elements": [ { "content": "Finally, a cow at the back of the group walks to the front of the picture.", "type": "action", "weight": 2 }, { "content": "The four cows in the front of the picture eat grass and move forward to block the herd of cows behind them.", "type": "action", "weight": 3 }, { "content": "Occasionally, a cow will raise its head with grass in its mouth.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0679", "video_path": "Pixabay/31ca084d58f3338f7042d2a823d03abd76ddca472a265e287a070a5cc0863e76.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 18.42, "resolution": { "width": 2560, "height": 1440 }, "caption": "The video begins with the camera looking down from a high altitude on a vast valley flanked by lush mountains with peaks towering in the distance. The camera slowly moves forward, gradually approaching a winding path in the valley, which zigzags along the valley's topography, leading to a village in the distance.\nThe camera continues to move forward as the mountains on either side disappear into the frame, the village in the valley becomes clearer and clearer, and new farmland appears close to the village, which is terraced with rolling hills in the distance.", "events": [ { "event": "The video begins with the camera looking down from a high altitude on a vast valley flanked by lush mountains with peaks towering in the distance. The camera slowly moves forward, gradually approaching a winding path in the valley, which zigzags along the valley's topography, leading to a village in the distance.", "visual_elements": [ { "content": "The camera looks down on a vast valley from above.", "type": "camera", "weight": 3 }, { "content": "The valley is flanked by lush mountains.", "type": "scene", "weight": 2 }, { "content": "In the distance, the peaks of the mountains tower above the clouds.", "type": "scene", "weight": 2 }, { "content": "The camera slowly moves forward to approach a winding path in the valley.", "type": "camera", "weight": 3 }, { "content": "The path twists and turns along the valley's terrain, leading to a village in the distance.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera continues to move forward as the mountains on either side disappear into the frame, the village in the valley becomes clearer and clearer, and new farmland appears close to the village, which is terraced with rolling hills in the distance.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The mountains on both sides disappear into the frame.", "type": "scene", "weight": 2 }, { "content": "The village in the valley becomes clearer and clearer.", "type": "attribute", "weight": 2 }, { "content": "New farmland appears close to the village.", "type": "attribute", "weight": 2 }, { "content": "The farmland is terraced.", "type": "attribute", "weight": 2 }, { "content": "In the distance, the mountains continue to rise.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0680", "video_path": "Pixabay/32a1dd3c698763629d52decf2ac93e67575708983077a6c016b2325c0eea678e.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 15.0, 
"resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a magnificent building, the front of which features intricate carvings and decorations. The camera passes through an archway, with the edges of the arch framing both sides of the scene. Next to this archway, there is another part of the arch.\nThe camera pans forward, revealing multiple arched structures at the entrance of the building. Each arch contains statues in various poses, some standing, others holding objects. Next to the entrance stands a tall dome-shaped building, with a large circular rose window on the front. The window is surrounded by geometric patterns and statues. On top of the dome is a small tower.\nAs the camera pans forward, it tilts upward, leaving only the edges of the arch in the scene. The ornate building is backed by an ancient stone wall, which features an arched glass window. The background shows a blue sky.", "events": [ { "event": "The video begins with the camera focusing on a magnificent building, the front of which features intricate carvings and decorations. The camera passes through an archway, with the edges of the arch framing both sides of the scene. Next to this archway, there is another part of the arch.", "visual_elements": [ { "content": "The camera focuses on a magnificent building.", "type": "camera", "weight": 3 }, { "content": "The front of the building features intricate carvings and decorations.", "type": "attribute", "weight": 2 }, { "content": "The camera passes through an archway.", "type": "camera", "weight": 3 }, { "content": "The edges of the arch form a scene on both sides of the scene.", "type": "attribute", "weight": 2 }, { "content": "Next to this archway, there is another part of the arch.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera pans forward, revealing multiple arched structures at the entrance of the building. Each arch contains statues in various poses, some standing, others holding objects. Next to the entrance stands a tall dome-shaped building, with a large circular rose window on the front. The window is surrounded by geometric patterns and statues. On top of the dome is a small tower.", "visual_elements": [ { "content": "The camera pans forward.", "type": "camera", "weight": 3 }, { "content": "Above the entrance passage of the building are multiple arched structures.", "type": "attribute", "weight": 2 }, { "content": "Each arch contains statues.", "type": "attribute", "weight": 2 }, { "content": "The statues are in various poses, some standing, others holding objects.", "type": "attribute", "weight": 1 }, { "content": "Next to the entrance stands a tall dome-shaped building.", "type": "attribute", "weight": 3 }, { "content": "The front of the building features a large circular rose window.", "type": "attribute", "weight": 2 }, { "content": "The window is surrounded by geometric patterns and statues.", "type": "attribute", "weight": 1 }, { "content": "On top of the dome is a small tower.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera pans forward, it tilts upward, leaving only the edges of the arch in the scene. The ornate building is backed by an ancient stone wall, which features an arched glass window. 
The background shows a blue sky.", "visual_elements": [ { "content": "As the camera pans forward, it tilts upward.", "type": "camera", "weight": 3 }, { "content": "The edges of the arch remain in the scene.", "type": "attribute", "weight": 1 }, { "content": "The ornate building is backed by an ancient stone wall.", "type": "attribute", "weight": 2 }, { "content": "The stone wall features an arched glass window.", "type": "attribute", "weight": 2 }, { "content": "The background shows a blue sky.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0681", "video_path": "Pixabay/3d2863475f71559886e5b7cf0e71e21c50feb285ad01c96e5f35812e58a75b2d.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 13.97, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a black dinner plate with an elaborate dish placed on it. The main components of the dish are several pieces of white chicken, sprinkled with green scallions and Chinese prickly ash grains. Beside the chicken, there is a bright red flower for decoration, and its leaves are also clearly visible.\nThe camera rotates counterclockwise. It can be seen that the chicken is covered with some green cucumber slices and crushed red peppers. Some sesame seeds and other spices are also mixed in the seasonings. The surface of the white chicken is shiny with oil.", "events": [ { "event": "At the beginning of the video, the camera is focused on a black dinner plate with an elaborate dish placed on it. The main components of the dish are several pieces of white chicken, sprinkled with green scallions and Chinese prickly ash grains. Beside the chicken, there is a bright red flower for decoration, and its leaves are also clearly visible.", "visual_elements": [ { "content": "The camera is focused on a black dinner plate.", "type": "camera", "weight": 3 }, { "content": "An elaborate dish is placed on the plate.", "type": "scene", "weight": 2 }, { "content": "The main components of the dish are several pieces of white chicken.", "type": "attribute", "weight": 3 }, { "content": "The chicken is sprinkled with green scallions and Chinese prickly ash grains.", "type": "attribute", "weight": 2 }, { "content": "Beside the chicken, there is a bright red flower for decoration.", "type": "attribute", "weight": 2 }, { "content": "The flower's leaves are also clearly visible.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera rotates counterclockwise. It can be seen that the chicken is covered with some green cucumber slices and crushed red peppers. Some sesame seeds and other spices are also mixed in the seasonings. 
The surface of the white chicken is shiny with oil.", "visual_elements": [ { "content": "The camera rotates counterclockwise.", "type": "camera", "weight": 3 }, { "content": "It can be seen that the chicken is covered with some green cucumber slices and crushed red peppers.", "type": "scene", "weight": 2 }, { "content": "Some sesame seeds and other spices are also mixed in the seasonings.", "type": "attribute", "weight": 1 }, { "content": "The surface of the white chicken is shiny with oil.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0682", "video_path": "Pixabay/427509c76e53a98f7ed712256ddf44559629b102ea811074e36775a03df46bf9.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 17.56, "resolution": { "width": 3840, "height": 2160 }, "caption": "The video begins with the camera focused on a round brown bird's nest made of woven branches. The nest is located on a green stump with some young branches with green leaves in front. A white stork stands on the nest with its head tilted back. The stork's feathers are white, with black spots on its wings, and its long orange-red beak and legs are very conspicuous. The stork stands on its left leg and tucks its right leg. The background is blurred, with green hills in the distance and sparse trees. Occasionally, there are a few houses with red roofs in the picture. The tone of the whole scene tends to be cold.\nThen, the camera starts to move in on the stork. The stork lowers its head and uses its right leg to groom the feathers on its chest. Then, the stork stops grooming its feathers and raises its head to look around.", "events": [ { "event": "The video begins with the camera focused on a round brown bird's nest made of woven branches. The nest is located on a green stump with some young branches with green leaves in front. A white stork stands on the nest with its head tilted back. The stork's feathers are white, with black spots on its wings, and its long orange-red beak and legs are very conspicuous. The stork stands on its left leg and tucks its right leg. The background is blurred, with green hills in the distance and sparse trees. Occasionally, there are a few houses with red roofs in the picture. The tone of the whole scene tends to be cold.", "visual_elements": [ { "content": "The camera is focused on a round brown bird's nest made of twigs.", "type": "camera", "weight": 3 }, { "content": "The nest is located on a green stump, with some young branches with green leaves in front.", "type": "scene", "weight": 3 }, { "content": "A white stork stands on the nest with its head tilted back.", "type": "attribute", "weight": 3 }, { "content": "The stork's feathers are white, with black spots on its wings, and its long orange-red beak and legs are very conspicuous.", "type": "attribute", "weight": 3 }, { "content": "The stork stands with its left leg supporting it and its right leg tucked away.", "type": "action", "weight": 2 }, { "content": "The background is blurred.", "type": "scene", "weight": 3 }, { "content": "The hills in the distance are green and the trees are sparse.", "type": "scene", "weight": 2 }, { "content": "Occasionally, there are a few houses with red roofs in the picture.", "type": "scene", "weight": 1 }, { "content": "The tone of the whole scene tends to be cold.", "type": "scene", "weight": 3 } ] }, { "event": "Then, the camera starts to move in on the stork. 
The stork lowers its head and uses its right leg to groom the feathers on its chest. Then, the stork stops grooming its feathers and raises its head to look around.", "visual_elements": [ { "content": "Then, the camera begins to move in towards the stork.", "type": "camera", "weight": 3 }, { "content": "The stork lowers its head and uses its right leg to comb the feathers on its chest.", "type": "action", "weight": 3 }, { "content": "Then, the stork stops combing its feathers and raises its head to look around.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0683", "video_path": "Pixabay/45770cb04186375e1903eceaa3e0f205667427462b44e8782b9514a60245e765.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 13.98, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera is focused on a gray dog with black markings. The dog has a black nose, forward-turned ears, round eyes, and a military green collar around its neck. The dog's mouth is slightly open, revealing white teeth. The background is relatively blurred, a green meadow with some yellow leaves mixed in.\nThe camera then moves slightly to the left, zooming in on the dog's face. The dog's nose is wet, and its eyes are looking directly into the camera. The dog's tail is wagging gently behind it, and its ears move occasionally.\nFinally, the camera shakes violently. The collar on the dog's neck disappears from time to time in the lower right corner of the picture. Finally, the dog's ears stand up.", "events": [ { "event": "At the beginning of the video, the camera is focused on a gray dog with black markings. The dog has a black nose, forward-turned ears, round eyes, and a military green collar around its neck. The dog's mouth is slightly open, revealing white teeth. The background is relatively blurred, a green meadow with some yellow leaves mixed in.", "visual_elements": [ { "content": "The camera is pointed at a gray dog with black markings.", "type": "camera", "weight": 3 }, { "content": "The dog has a black nose, forward-turned ears and round eyes.", "type": "attribute", "weight": 2 }, { "content": "The dog has a military green collar around its neck.", "type": "attribute", "weight": 1 }, { "content": "The dog's mouth is slightly open, revealing white teeth.", "type": "action", "weight": 2 }, { "content": "The background is blurry.", "type": "scene", "weight": 3 }, { "content": "The background is a green meadow.", "type": "scene", "weight": 2 }, { "content": "The meadow is dotted with some yellow leaves.", "type": "scene", "weight": 1 } ] }, { "event": "The camera then moves slightly to the left, zooming in on the dog's face. The dog's nose is wet, and its eyes are looking directly into the camera. The dog's tail is wagging gently behind it, and its ears move occasionally.", "visual_elements": [ { "content": "The camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The camera zooms in on the dog's face.", "type": "camera", "weight": 2 }, { "content": "The dog's nose is wet, and its eyes are looking directly at the camera.", "type": "attribute", "weight": 2 }, { "content": "The dog's tail sways gently behind it.", "type": "action", "weight": 2 }, { "content": "The dog's ears also move from time to time.", "type": "action", "weight": 1 } ] }, { "event": "Finally, the camera shakes violently. 
The collar on the dog's neck disappears from time to time in the lower right corner of the picture. Finally, the dog's ears stand up.", "visual_elements": [ { "content": "Finally, the camera shakes violently.", "type": "camera", "weight": 3 }, { "content": "The collar on the dog's neck disappears from time to time in the lower right corner of the picture.", "type": "attribute", "weight": 1 }, { "content": "Finally, the dog raises its ears.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0684", "video_path": "Pixabay/55a4f78b23acbf6f93b70cb4385c4a266d4371fd2ec9bef41401410c47a6967d.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 21.28, "resolution": { "width": 3840, "height": 2160 }, "caption": "The video begins with a high-altitude view of a busy city street intersection. The street is surrounded by railings, and outside the railings stand many uniformed people wearing hats, white short-sleeved shirts and black pants. The railings are crowded with people. Many people are wearing summer clothes, and some are holding Ukrainian flags. There is a large round platform in the crowd on the left side of the picture. There are several army-green military vehicles in the middle of the street. The sky is blue, and in the background, the buildings are of different styles. On the left side of the picture is a classical building, and on the right side of the picture is a modern high-rise. There are traffic lights and signs on the street.\nThe camera slowly moves to the front right, and the large round platform on the left side of the screen gradually disappears.", "events": [ { "event": "The video begins with a high-altitude view of a busy city street intersection. The street is surrounded by railings, and outside the railings stand many uniformed people wearing hats, white short-sleeved shirts and black pants. The railings are crowded with people. Many people are wearing summer clothes, and some are holding Ukrainian flags. There is a large round platform in the crowd on the left side of the picture. There are several army-green military vehicles in the middle of the street. The sky is blue, and in the background, the buildings are of different styles. On the left side of the picture is a classical building, and on the right side of the picture is a modern high-rise. 
There are traffic lights and signs on the street.", "visual_elements": [ { "content": "The camera looks down from a high altitude at a busy city street intersection.", "type": "camera", "weight": 3 }, { "content": "The street is surrounded by railings.", "type": "attribute", "weight": 1 }, { "content": "Many people wearing hats and uniforms stand outside the railings.", "type": "attribute", "weight": 2 }, { "content": "The uniforms are white short-sleeved shirts and black pants.", "type": "attribute", "weight": 1 }, { "content": "The railings are crowded with people.", "type": "attribute", "weight": 3 }, { "content": "Many people are wearing summer clothes.", "type": "attribute", "weight": 2 }, { "content": "Some people hold the Ukrainian flag in their hands.", "type": "attribute", "weight": 1 }, { "content": "There is a large round platform in the crowd on the left side of the picture.", "type": "attribute", "weight": 2 }, { "content": "There are several army-green military vehicles in the middle of the street.", "type": "attribute", "weight": 2 }, { "content": "There are traffic lights and signs on the street.", "type": "scene", "weight": 1 }, { "content": "The sky is blue.", "type": "scene", "weight": 1 }, { "content": "In the background, the buildings are of different styles.", "type": "scene", "weight": 2 }, { "content": "There is a classical building on the left side of the picture, and a modern high-rise building on the right side of the picture.", "type": "scene", "weight": 2 } ] }, { "event": "The camera slowly moves to the front right, and the large round platform on the left side of the screen gradually disappears.", "visual_elements": [ { "content": "The camera moves slowly to the right front.", "type": "camera", "weight": 3 }, { "content": "The large round platform on the left side of the picture gradually disappears.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0685", "video_path": "Pixabay/656694340c763c872f3119c809279b6962a79affc421cd6a0892653ee312adfa.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 15.8, "resolution": { "width": 1920, "height": 1080 }, "caption": "At the beginning of the video, the camera looks down from a high altitude, showing a vast landfill. In the center of the picture is a wide garbage belt, with bare land on both sides. There are two bulldozers on the garbage belt, working facing each other, and after they meet they continue to drive toward the top of the picture. In the background, the ground shows irregular sand and soil, and some areas are covered by tire marks from machinery. There is a pile of scattered black objects on the left side of the picture.\nThe camera moves forward, and the whole picture of the landfill gradually emerges. The boundary of the landfill gradually becomes clear. The garbage belt extends into the distance, and the land on both sides gradually transitions to normal landforms. The black area on the left side of the picture gradually disappears to the bottom of the picture.", "events": [ { "event": "At the beginning of the video, the camera looks down from a high altitude, showing a vast landfill. In the center of the picture is a wide garbage belt, with bare land on both sides. There are two bulldozers on the garbage belt, working facing each other, and after they meet they continue to drive toward the top of the picture. In the background, the ground shows irregular sand and soil, and some areas are covered by tire marks from machinery. 
There is a pile of scattered black objects on the left side of the picture.", "visual_elements": [ { "content": "The camera is looking down from a high altitude.", "type": "camera", "weight": 3 }, { "content": "The camera shows a vast landfill.", "type": "attribute", "weight": 3 }, { "content": "In the center of the picture is a wide garbage belt with bare land on both sides.", "type": "attribute", "weight": 3 }, { "content": "There are two bulldozers on the garbage belt.", "type": "attribute", "weight": 2 }, { "content": "The bulldozers are working facing each other.", "type": "action", "weight": 1 }, { "content": "After the bulldozers meet, they continue to drive upwards.", "type": "action", "weight": 1 }, { "content": "In the background, the ground shows irregular sand and soil.", "type": "scene", "weight": 3 }, { "content": "Some areas are covered by tire marks of machinery.", "type": "scene", "weight": 2 }, { "content": "There is a pile of scattered black objects on the left side of the picture.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves forward, and the whole picture of the landfill gradually emerges. The boundary of the landfill gradually becomes clear. The garbage belt extends into the distance, and the land on both sides gradually transitions to normal landforms. The black area on the left side of the picture gradually disappears to the bottom of the picture.", "visual_elements": [ { "content": "The camera moves forward.", "type": "camera", "weight": 3 }, { "content": "The boundary of the landfill gradually becomes clear.", "type": "attribute", "weight": 3 }, { "content": "The garbage belt extends into the distance, and the land on both sides gradually transitions to normal landforms.", "type": "attribute", "weight": 2 }, { "content": "The black area on the left side of the screen gradually disappears to the bottom of the screen.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0686", "video_path": "Pixabay/94ca9969796d88be3e4675ff564505eff75c1fd54b4db87408af2c7e0035fde3.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 14.6, "resolution": { "width": 3840, "height": 2160 }, "caption": "At the beginning of the video, the camera focuses on a white pelican. The pelican has pure white feathers and a long, straight beak with light yellow and orange stripes. The pelican slowly swims to the right of the screen on the calm water surface, reflecting the pelican's figure. Yellow and green leaves are floating on the water. The background of the picture is dim.\nThe camera moves slowly to the right at a fixed angle, following the direction of the pelican's swimming. The camera gradually turns to the side of the pelican. The pelican uses its thin red and white legs to kick the water backwards to swim forward. There are some small ripples on the water surface, which spread out with the movement of the pelican. As the pelican swims, the reflections of other pelicans appear on the water surface above the screen.\nAt the end of the video, the camera continues to move to the right. The pelican's head gradually disappears to the top of the screen. The reflections of other pelicans on the water disappear as the pelican swims. More green and yellow leaves float on the water on the right side of the screen.", "events": [ { "event": "At the beginning of the video, the camera focuses on a white pelican. 
The pelican has pure white feathers and a long, straight beak with light yellow and orange stripes. The pelican slowly swims to the right of the screen on the calm water surface, reflecting the pelican's figure. Yellow and green leaves are floating on the water. The background of the picture is dim.", "visual_elements": [ { "content": "The camera focuses on a white pelican.", "type": "camera", "weight": 3 }, { "content": "The pelican has pure white feathers and a long, straight beak with light yellow and orange stripes.", "type": "attribute", "weight": 3 }, { "content": "The pelican is swimming slowly on the water to the right of the screen.", "type": "action", "weight": 3 }, { "content": "The water is calm, reflecting the pelican.", "type": "scene", "weight": 2 }, { "content": "Yellow and green leaves are floating on the water.", "type": "scene", "weight": 1 }, { "content": "The background of the screen is darker.", "type": "scene", "weight": 3 } ] }, { "event": "The camera moves slowly to the right at a fixed angle, following the direction of the pelican's swimming. The camera gradually turns to the side of the pelican. The pelican uses its thin red and white legs to kick the water backwards to swim forward. There are some small ripples on the water surface, which spread out with the movement of the pelican. As the pelican swims, the reflections of other pelicans appear on the water surface above the screen.", "visual_elements": [ { "content": "The camera moves slowly to the right at a fixed angle, following the direction of the pelican's swimming.", "type": "camera", "weight": 3 }, { "content": "The camera gradually turns to the side of the pelican.", "type": "camera", "weight": 2 }, { "content": "The pelican uses its thin red and white legs to kick the water backwards to swim forward.", "type": "action", "weight": 1 }, { "content": "The reflections of other pelicans appear on the water above the screen.", "type": "scene", "weight": 2 } ] }, { "event": "At the end of the video, the camera continues to move to the right. The pelican's head gradually disappears to the top of the screen. The reflections of other pelicans on the water disappear as the pelican swims. More green and yellow leaves float on the water on the right side of the screen.", "visual_elements": [ { "content": "The camera continues to move to the right.", "type": "camera", "weight": 2 }, { "content": "The pelican's head gradually disappears to the top of the screen.", "type": "attribute", "weight": 3 }, { "content": "The reflections of other pelicans on the water disappear as the pelican swims.", "type": "scene", "weight": 1 }, { "content": "More green and yellow leaves are floating on the water on the right side of the screen.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0687", "video_path": "Pixabay/b7b68754e5100b10c6629daf3d1eaadda61322c7f1578dfbf3b17515f127f124.mp4", "video_source": "Pixabay", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 12.45, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a blue sky with a few white clouds and a bright moon floating in the sky; the white clouds are spread out on the left side of the frame, and the bright moon hangs high in the upper right of the frame, with the clouds in various shapes. 
As time passes, the camera moves to the right to show more changes in the clouds, and the shape and arrangement of the clouds continue to change, with some gradually thinning and eventually disappearing into the blue sky.\nThe white clouds gradually drift away to the left side of the frame, and the bright moon gradually disappears into the upper right side of the frame.", "events": [ { "event": "The video begins with the camera focusing on a blue sky with a few white clouds and a bright moon floating in the sky; the white clouds are spread out on the left side of the frame, and the bright moon hangs high in the upper right of the frame, with the clouds in various shapes. As time passes, the camera moves to the right to show more changes in the clouds, and the shape and arrangement of the clouds continue to change, with some gradually thinning and eventually disappearing into the blue sky.", "visual_elements": [ { "content": "The camera focusing on a blue sky.", "type": "camera", "weight": 3 }, { "content": "There are a few white clouds and a bright moon floating in the sky.", "type": "scene", "weight": 3 }, { "content": "The white clouds are spread out on the left side of the frame.", "type": "attribute", "weight": 2 }, { "content": "The bright moon hangs high in the upper right of the picture.", "type": "attribute", "weight": 2 }, { "content": "Clouds come in different shapes.", "type": "attribute", "weight": 2 }, { "content": "The camera moves to the right to show more changes in the clouds.", "type": "camera", "weight": 3 }, { "content": "The shape and arrangement of the clouds continue to change.", "type": "attribute", "weight": 2 }, { "content": "Some clouds gradually thin out and eventually disappear into the blue sky.", "type": "attribute", "weight": 1 } ] }, { "event": "The white clouds gradually drift away to the left side of the frame, and the bright moon gradually disappears into the upper right side of the frame.", "visual_elements": [ { "content": "The white clouds gradually drift away to the left side of the picture, and the bright moon gradually disappears into the upper right side of the picture.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0688", "video_path": "MixKit/mixkit-harvest-fields-on-a-cloudy-day-2127.mp4", "video_source": "MixKit", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 20.02, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera looking down on a vast green field from an elevated position with rolling hills in the distance. The camera moves forward and there are some bushes and ditches in the field.\nThe camera continues to move forward as the trees and bushes in the field become clearer, showing another part of the field where there are more trees and bushes, creating a small wood.\nThe camera continues to advance and the image is fixed at an overhead perspective.", "events": [ { "event": "The video begins with the camera looking down on a vast green field from an elevated position with rolling hills in the distance. 
The camera moves forward and there are some bushes and ditches in the field.", "visual_elements": [ { "content": "The camera looking down on a vast green field from an elevated position.", "type": "camera", "weight": 3 }, { "content": "In the distance are rolling hills.", "type": "scene", "weight": 3 }, { "content": "The camera moves forward.", "type": "camera", "weight": 3 }, { "content": "There are some bushes and ditches in the field.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move forward as the trees and bushes in the field become clearer, showing another part of the field where there are more trees and bushes, creating a small wood.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The trees and bushes in the field become clearer.", "type": "scene", "weight": 2 }, { "content": "The shot shows another part of the field.", "type": "scene", "weight": 2 }, { "content": "More trees and bushes.", "type": "scene", "weight": 1 } ] }, { "event": "The camera continues to advance and the image is fixed at an overhead perspective.", "visual_elements": [ { "content": "The camera moves forward.", "type": "camera", "weight": 3 }, { "content": "The scene is fixed at an overhead angle.", "type": "scene", "weight": 3 } ] } ], "n_events": 3, "n_elements": 10 }, { "index": "TUNA_0689", "video_path": "MixKit/mixkit-hill-in-the-middle-of-a-mountain-range-3877.mp4", "video_source": "MixKit", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 15.14, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera looking down on a vast mountainous landscape from a high altitude. There are hills and valleys in the scene, and the hills are covered with sparse vegetation, mainly some low shrubs and trees. There are some green fields and trees in the valleys and light clouds in the sky.\nThe camera moves slowly forward to show more of the mountainous landscape with a wider view. The tops of the hills on the right side of the frame are flatter and wider, and the valleys are gradually becoming more green with vegetation, and there is a patch of buildings and a road at the top of the hills.", "events": [ { "event": "The video begins with the camera looking down on a vast mountainous landscape from a high altitude. There are hills and valleys in the scene, and the hills are covered with sparse vegetation, mainly some low shrubs and trees. There are some green fields and trees in the valleys and light clouds in the sky.", "visual_elements": [ { "content": "The camera looks down on a vast mountainous landscape from a high altitude.", "type": "camera", "weight": 3 }, { "content": "There are hills and valleys in the scene.", "type": "scene", "weight": 2 }, { "content": "The hills are covered with sparse vegetation, mainly low shrubs and trees.", "type": "scene", "weight": 2 }, { "content": "There are some green fields and trees in the valleys.", "type": "attribute", "weight": 2 }, { "content": "There are light clouds in the sky.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves slowly forward to show more of the mountainous landscape with a wider view. 
The tops of the hills on the right side of the frame are flatter and wider, and the valleys are gradually becoming more green with vegetation, and there is a patch of buildings and a road at the top of the hills.", "visual_elements": [ { "content": "The camera moves slowly forward to show more of the mountainous landscape.", "type": "camera", "weight": 3 }, { "content": "The view is more open.", "type": "scene", "weight": 2 }, { "content": "The tops of the hills on the right are flatter and wider.", "type": "scene", "weight": 1 }, { "content": "There is more green vegetation in the valley.", "type": "scene", "weight": 2 }, { "content": "There is a patch of buildings and a road at the top of the hill.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0690", "video_path": "MixKit/mixkit-landscape-view-around-a-volcano-4674.mp4", "video_source": "MixKit", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 10.08, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a high peak, the outline of which is clearly visible in the morning sun. The sun is obscured by clouds and the sky takes on a light orange and purple color. The camera moves slowly to the left, showing a full view of the peak as well as a lake at the foot of the mountain.", "events": [ { "event": "The video begins with the camera focusing on a high peak, the outline of which is clearly visible in the morning sun. The sun is obscured by clouds and the sky takes on a light orange and purple color. The camera moves slowly to the left, showing a full view of the peak as well as a lake at the foot of the mountain.", "visual_elements": [ { "content": "The camera focuses on a high peak.", "type": "camera", "weight": 3 }, { "content": "The silhouette of the peak is clearly visible in the morning sun.", "type": "scene", "weight": 2 }, { "content": "The sky is pale orange and purple.", "type": "attribute", "weight": 2 }, { "content": "The camera moves slowly to the left to show a full view of the peak and the lake at the foot of the hill.", "type": "camera", "weight": 3 } ] } ], "n_events": 1, "n_elements": 4 }, { "index": "TUNA_0691", "video_path": "MixKit/mixkit-picnic-bench-in-the-woods-2641.mp4", "video_source": "MixKit", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 10.16, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a wooden picnic table, with the tabletop and benches looking a little old. In the background is an open meadow with a few scattered shrubs and trees.\nAs the camera moves upward, a large tree appears in the frame, its branches stretching out in all directions, some of them hanging low near the ground. The sunlight pours onto the ground through the gaps in the leaves, creating a dappled effect of light and shadow. The grass in the background gradually becomes blurred, and the horizon looms in the distance.\nAt the end of the video, the camera continues to move upward, leaving only the branches of the large tree stretching in all directions and the bright sky in the frame.", "events": [ { "event": "The video begins with the camera focusing on a wooden picnic table, with the tabletop and benches looking a little old. 
In the background is an open meadow with a few scattered shrubs and trees.", "visual_elements": [ { "content": "The camera focuses on a wooden picnic table.", "type": "camera", "weight": 3 }, { "content": "The table and benches look old.", "type": "scene", "weight": 2 }, { "content": "In the background is an open meadow.", "type": "scene", "weight": 2 }, { "content": "There are a few scattered bushes and trees in the meadow.", "type": "scene", "weight": 2 } ] }, { "event": "As the camera moves upward, a large tree appears in the frame, its branches stretching out in all directions, some of them hanging low near the ground. The sunlight pours onto the ground through the gaps in the leaves, creating a dappled effect of light and shadow. The grass in the background gradually becomes blurred, and the horizon looms in the distance.", "visual_elements": [ { "content": "The camera moves up.", "type": "scene", "weight": 2 }, { "content": "A large tree appears in the center of the frame.", "type": "attribute", "weight": 2 }, { "content": "The branches of the tree are spreading out in all directions.", "type": "attribute", "weight": 2 }, { "content": "Some of the branches hang low to the ground.", "type": "attribute", "weight": 2 }, { "content": "The sunlight pours onto the ground through the leaves.", "type": "attribute", "weight": 2 }, { "content": "Creating a dappled effect of light and shadow.", "type": "attribute", "weight": 2 }, { "content": "The grass in the background becomes blurred.", "type": "attribute", "weight": 2 }, { "content": "The horizon is visible in the distance.", "type": "attribute", "weight": 2 } ] }, { "event": "At the end of the video, the camera continues to move upward, leaving only the branches of the large tree stretching in all directions and the bright sky in the frame.", "visual_elements": [ { "content": "The camera continues to move upward.", "type": "camera", "weight": 3 }, { "content": "The only thing left in the frame are the branches of the big trees stretching out in all directions and the bright sky.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0692", "video_path": "MixKit/mixkit-rocky-mountain-covered-with-trees-seen-from-above-35943.mp4", "video_source": "MixKit", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 15.18, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera looking down on the entire mountain landscape from a high point. The mountains are covered with large and small rocks, with some green vegetation dotted between them.\nThe camera gradually pulls away to show a wider landscape, with the mountains stretching into the distance and city buildings in the distance. There are a few white clouds in the sky, and sunlight pours through them onto the mountains.", "events": [ { "event": "The video begins with the camera looking down on the entire mountain landscape from a high point. 
The mountains are covered with large and small rocks, with some green vegetation dotted between them.", "visual_elements": [ { "content": "The camera looks down on the entire landscape of the mountains from an elevated position.", "type": "camera", "weight": 3 }, { "content": "The mountains are covered with rocks of all sizes.", "type": "scene", "weight": 2 }, { "content": "The rocks are dotted with green vegetation.", "type": "scene", "weight": 2 } ] }, { "event": "The camera gradually pulls away to show a wider landscape, with the mountains stretching into the distance and city buildings in the distance. There are a few white clouds in the sky, and sunlight pours through them onto the mountains.", "visual_elements": [ { "content": "The camera gradually pulls away to show the wider landscape.", "type": "camera", "weight": 3 }, { "content": "The mountains extend into the distance.", "type": "attribute", "weight": 2 }, { "content": "In the distance are city buildings.", "type": "scene", "weight": 2 }, { "content": "There are a few white clouds in the sky.", "type": "attribute", "weight": 2 }, { "content": "The sun shines through the clouds on the mountains.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0693", "video_path": "MixKit/mixkit-stunning-view-on-top-of-a-mountain-3391.mp4", "video_source": "MixKit", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 15.06, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera looking down on a snow-covered mountain forest from a high altitude. The camera slowly moves forward to show a vast snowy landscape with trees covered in thick snow and mountains and clouds visible in the distance.\nThe camera then continues to move forward to show more snow and trees, displaying a panoramic view of the entire mountain range. The tops of the mountains and the clouds in the distance can be seen.", "events": [ { "event": "The video begins with the camera looking down on a snow-covered mountain forest from a high altitude. The camera slowly moves forward to show a vast snowy landscape with trees covered in thick snow and mountains and clouds visible in the distance.", "visual_elements": [ { "content": "The camera looks down on a snow-covered mountain forest from a high altitude.", "type": "camera", "weight": 3 }, { "content": "The camera moves slowly forward showing the vast snowy landscape.", "type": "camera", "weight": 3 }, { "content": "The trees are covered in thick snow.", "type": "scene", "weight": 2 }, { "content": "The mountains and clouds are also clearly visible in the distance.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera then continues to move forward to show more snow and trees, displaying a panoramic view of the entire mountain range. 
The tops of the mountains and the clouds in the distance can be seen.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "More snow and trees are shown.", "type": "scene", "weight": 2 }, { "content": "A panoramic view of the entire mountain range is shown.", "type": "scene", "weight": 2 }, { "content": "The tops of the mountains and the clouds in the distance can be seen.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0694", "video_path": "MixKit/mixkit-touring-a-lake-in-the-middle-of-a-large-pine-41377.mp4", "video_source": "MixKit", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 20.73, "resolution": { "width": 1920, "height": 1080 }, "caption": "The video begins with the camera focusing on a serene lake covered in a layer of green floating weeds. In the distance are a few dead trees. In the background is a dense forest with tall, lush trees.\nThe camera moves forward and the edge of the forest is gradually revealed. The grass at the edge of the lake is covered with green plants, there are floating weeds on the surface of the lake, and a few waterfowl occasionally fly over the water.\nAt the end of the video, the camera slowly lifts upwards, and at the end of the forest is a rolling mountain range, the top of which is shrouded in clouds.", "events": [ { "event": "The video begins with the camera focusing on a serene lake covered in a layer of green floating weeds. In the distance are a few dead trees. In the background is a dense forest with tall, lush trees.", "visual_elements": [ { "content": "The camera focuses on a peaceful lake.", "type": "camera", "weight": 3 }, { "content": "The surface of the lake is covered with a layer of green floating weeds.", "type": "scene", "weight": 2 }, { "content": "In the distance are a few dead trees.", "type": "scene", "weight": 3 }, { "content": "In the background is a dense forest.", "type": "scene", "weight": 2 }, { "content": "The trees are tall and lush.", "type": "scene", "weight": 3 } ] }, { "event": "The camera moves forward and the edge of the forest is gradually revealed. 
The grass at the edge of the lake is covered with green plants, there are floating weeds on the surface of the lake, and a few waterfowl occasionally fly over the water.", "visual_elements": [ { "content": "The camera moves forward.", "type": "camera", "weight": 3 }, { "content": "The edge of the forest gradually appears.", "type": "scene", "weight": 2 }, { "content": "The grass by the lake is full of green plants.", "type": "scene", "weight": 2 }, { "content": "There are floating weeds on the surface of the lake.", "type": "scene", "weight": 2 }, { "content": "Occasionally a few water birds fly over the water.", "type": "attribute", "weight": 2 } ] }, { "event": "At the end of the video, the camera slowly lifts upwards, and at the end of the forest is a rolling mountain range, the top of which is shrouded in clouds.", "visual_elements": [ { "content": "The camera slowly lifts upwards.", "type": "attribute", "weight": 2 }, { "content": "At the end of the forest is a rolling mountain range.", "type": "camera", "weight": 3 }, { "content": "The tops of the mountains are shrouded in clouds.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0695", "video_path": "VIDGEN-1M/-7DvWczOhz8-Scene-0185.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 6.88, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a young white male standing in front of a graffiti wall. The man has a black beard and short, slightly curly brown hair. The man is wearing a black leather jacket and black gloves. In his left hand, the man holds a mobile phone, which displays a colorful thermal image on the screen. The wall in front of the man is made of red bricks and painted white. There are graffiti and markings in various colors, including blue, black, purple and red. The graffiti on the wall varies in style, some are abstract patterns, and some are words and symbols.\nThen, the camera moves downwards, following the man's movements. The man moves downwards, holding the phone, with the phone on his chest. The man is scanning the wall. The thermal image on the phone screen keeps changing.\nThe camera then moves slightly to the left around the man. Black graffiti is shown on the wall.\nFinally, the camera moves slightly to the right as the man moves. The man holds the phone in front of his chest and moves slightly to the right.", "events": [ { "event": "The video begins with a young white male standing in front of a graffiti wall. The man has a black beard and short, slightly curly brown hair. The man is wearing a black leather jacket and black gloves. In his left hand, the man holds a mobile phone, which displays a colorful thermal image on the screen. The wall in front of the man is made of red bricks and painted white. There are graffiti and markings in various colors, including blue, black, purple and red. 
The graffiti on the wall varies in style, some are abstract patterns, and some are words and symbols.", "visual_elements": [ { "content": "The camera is focused on a young white man standing in front of a graffiti wall.", "type": "camera", "weight": 3 }, { "content": "The man has a black beard and short brown curly hair.", "type": "attribute", "weight": 2 }, { "content": "The man is wearing a black leather jacket and black gloves.", "type": "attribute", "weight": 2 }, { "content": "The man is holding a mobile phone in his left hand.", "type": "action", "weight": 3 }, { "content": "The screen of the mobile phone displays a colorful thermal image.", "type": "attribute", "weight": 1 }, { "content": "The wall in front of the man is made of red bricks and painted white.", "type": "scene", "weight": 3 }, { "content": "There are graffiti and markings in various colors on the wall, including blue, black, purple and red.", "type": "attribute", "weight": 3 }, { "content": "The graffiti on the wall has various styles, some are abstract patterns, and some are text and symbols.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera moves downwards, following the man's movements. The man moves downwards, holding the phone, with the phone on his chest. The man is scanning the wall. The thermal image on the phone screen keeps changing.", "visual_elements": [ { "content": "Then, the camera moves downwards with the man's movements.", "type": "camera", "weight": 3 }, { "content": "The man moves downwards with the mobile phone.", "type": "action", "weight": 3 }, { "content": "The mobile phone is placed on the man's chest.", "type": "attribute", "weight": 1 }, { "content": "The man is scanning the wall.", "type": "action", "weight": 2 }, { "content": "The thermal image on the mobile phone screen keeps changing.", "type": "action", "weight": 1 } ] }, { "event": "The camera then moves slightly to the left around the man. Black graffiti is shown on the wall.", "visual_elements": [ { "content": "Then, the camera moves slightly to the left around the man.", "type": "camera", "weight": 3 }, { "content": "Black graffiti is displayed on the wall.", "type": "attribute", "weight": 1 } ] }, { "event": "Finally, the camera moves slightly to the right as the man moves. The man holds the phone in front of his chest and moves slightly to the right.", "visual_elements": [ { "content": "Finally, the camera moves slightly to the right with the man's movements.", "type": "camera", "weight": 3 }, { "content": "The man holds the mobile phone in front of his chest and moves slightly to the right.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 17 }, { "index": "TUNA_0696", "video_path": "VIDGEN-1M/-fPdRvIYOnU-Scene-0147.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 6.67, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is pointed at an automatic ticket machine. The appearance of the ticket machine is silver, with white and yellow logos on the blue display. There is a card slot and a ticket slot on the ticket machine. There is a sign on the upper display of the ticket machine that reads \"Please collect your Standard Tickets here\". There is a hand on the right side of the picture, with the palm slightly upward, putting a green banknote into the banknote slot of the ticket machine.\nThe camera moves slightly to the right. 
The hand slightly adjusts the position of the banknote, and the banknote is slowly swallowed by the machine. Then, the red indicator light of the ticket outlet lights up. After a few seconds, the ticket outlet spits out the banknote.\nThe camera finally moves slightly to the left to show more of the area to the left of the ticket machine. There is a white sign with red letters on it to the left of the ticket machine. The camera is always shaking during the whole process.", "events": [ { "event": "At the beginning of the video, the camera is pointed at an automatic ticket machine. The appearance of the ticket machine is silver, with white and yellow logos on the blue display. There is a card slot and a ticket slot on the ticket machine. There is a sign on the upper display of the ticket machine that reads \"Please collect your Standard Tickets here\". There is a hand on the right side of the picture, with the palm slightly upward, putting a green banknote into the banknote slot of the ticket machine.", "visual_elements": [ { "content": "The camera is pointed at an automatic ticket machine.", "type": "camera", "weight": 3 }, { "content": "The appearance of the ticket machine is silver.", "type": "attribute", "weight": 2 }, { "content": "The blue display screen of the ticket machine is equipped with white and yellow logos.", "type": "attribute", "weight": 1 }, { "content": "There is a card slot and a ticket slot on the ticket machine.", "type": "attribute", "weight": 2 }, { "content": "There is a sign on the upper display screen of the ticket machine that reads \"Please collect your Standard Tickets here\".", "type": "attribute", "weight": 1 }, { "content": "There is a hand on the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The palm is slightly upward, and a green banknote is being put into the banknote slot of the ticket machine.", "type": "action", "weight": 3 } ] }, { "event": "The camera moves slightly to the right. The hand slightly adjusts the position of the banknote, and the banknote is slowly swallowed by the machine. Then, the red indicator light of the ticket outlet lights up. After a few seconds, the ticket outlet spits out the banknote.", "visual_elements": [ { "content": "The camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The hand slightly adjusts the position of the banknote, and the banknote is slowly swallowed by the machine.", "type": "action", "weight": 2 }, { "content": "Then, the red indicator light of the ticket slot lights up.", "type": "action", "weight": 1 }, { "content": "After a few seconds, the ticket slot spits out the banknote.", "type": "action", "weight": 2 } ] }, { "event": "The camera finally moves slightly to the left to show more of the area to the left of the ticket machine. There is a white sign with red letters on it to the left of the ticket machine. 
The camera is always shaking during the whole process.", "visual_elements": [ { "content": "The camera finally moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The camera shows more of the left side of the ticket machine.", "type": "attribute", "weight": 1 }, { "content": "There is a white sign with red letters on the left side of the ticket machine.", "type": "attribute", "weight": 1 }, { "content": "The camera is shaking all the time during the whole process.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0697", "video_path": "VIDGEN-1M/-sD8SYiKZt4-Scene-0001.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 6.07, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the screen focuses on a catcher wearing gray and red protective gear and a helmet. He squats behind home plate and moves to the left to adjust his position, with his gloved hands ready to catch the ball.\nNext, the camera switches to a wider angle, showing a baseman wearing black protective clothing standing behind the catcher. The baseman is half-crouching on the ground with his legs bent, staring at the pitching place. To the left of the catcher stands a batter wearing a white jersey and a black helmet, holding the bat with his hands wearing red gloves ready to hit the ball. Not far from them stands a pitcher wearing a gray jersey and gray pants and a blue baseball cap, standing to his right ready to throw. In the background are spectators, \"KeyBank\" and \"TOYOTA\" billboards. The field is composed of green grass, circular land and red areas.\nThe pitcher begins to prepare to pitch, raising his right leg and leaning forward to throw the ball. The catcher remains in a squatting position, holding the glove in his left hand ready to catch the ball. The batter raises his left leg to maintain a batting posture. After the pitcher throws the ball, the batter hits the ball and hits it out of the right side of the screen, and then he intends to run to the right. The basemen and catcher stand up one after another as the bat moves.", "events": [ { "event": "At the beginning of the video, the screen focuses on a catcher wearing gray and red protective gear and a helmet. He squats behind home plate and moves to the left to adjust his position, with his gloved hands ready to catch the ball.", "visual_elements": [ { "content": "The screen focuses on a catcher wearing gray and red protective gear and a helmet.", "type": "attribute", "weight": 3 }, { "content": "The catcher crouches behind home plate and moves left to adjust his position.", "type": "action", "weight": 3 }, { "content": "Hands in gloves ready to catch the ball.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera switches to a wider angle, showing a baseman wearing black protective clothing standing behind the catcher. The baseman is half-crouching on the ground with his legs bent, staring at the pitching place. To the left of the catcher stands a batter wearing a white jersey and a black helmet, holding the bat with his hands wearing red gloves ready to hit the ball. Not far from them stands a pitcher wearing a gray jersey and gray pants and a blue baseball cap, standing to his right ready to throw. In the background are spectators, \"KeyBank\" and \"TOYOTA\" billboards. 
The field is composed of green grass, circular land and red areas.", "visual_elements": [ { "content": "The camera switches to a wider angle.", "type": "camera", "weight": 3 }, { "content": "Behind the catcher in the picture stands a baseman in black protective clothing.", "type": "attribute", "weight": 3 }, { "content": "The baseman has his legs bent and is half-crouching on the ground.", "type": "attribute", "weight": 2 }, { "content": "The baseman is staring at the pitching area.", "type": "attribute", "weight": 2 }, { "content": "To the left of the catcher stands a batter in a white jersey and a black helmet.", "type": "attribute", "weight": 3 }, { "content": "The batter holds the bat with both hands in red gloves, ready to hit the ball.", "type": "action", "weight": 3 }, { "content": "Not far from the catcher and the batter stands a pitcher in a gray jersey, gray pants, and a blue baseball cap.", "type": "attribute", "weight": 3 }, { "content": "The pitcher stands to the right, ready to throw the ball.", "type": "attribute", "weight": 2 }, { "content": "In the background are the audience seats, \"KeyBank\" and \"TOYOTA\" billboards.", "type": "scene", "weight": 2 }, { "content": "The field is composed of green lawns, circular land, and red areas.", "type": "scene", "weight": 2 } ] }, { "event": "The pitcher begins to prepare to pitch, raising his right leg and leaning forward to throw the ball. The catcher remains in a squatting position, holding the glove in his left hand ready to catch the ball. The batter raises his left leg to maintain a batting posture. After the pitcher throws the ball, the batter hits the ball and hits it out of the right side of the screen, and then he intends to run to the right. The basemen and catcher stand up one after another as the bat moves.", "visual_elements": [ { "content": "The pitcher begins to prepare to pitch.", "type": "action", "weight": 2 }, { "content": "The pitcher raises his right leg and leans forward to throw the ball.", "type": "action", "weight": 3 }, { "content": "The catcher remains in a squatting position.", "type": "action", "weight": 2 }, { "content": "The catcher holds the glove in his left hand and prepares to catch the ball.", "type": "action", "weight": 3 }, { "content": "The batter raises his left leg to keep in a batting position.", "type": "action", "weight": 3 }, { "content": "After the pitcher throws the ball, the batter hits the ball and hits it out of the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The batter intends to run to the right.", "type": "action", "weight": 2 }, { "content": "The basemen and catcher stand up one after another as the batting action.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0698", "video_path": "VIDGEN-1M/0Cfm2MQSfLk-Scene-0021.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 8.48, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is aimed at the ice hockey rink, where several players are playing. The audience seats in the picture are full of spectators, and there are billboards and stadium logos in the background. Players wearing blue jerseys and players wearing white jerseys are in fierce competition on the ice hockey rink. 
The camera moves backward and right, and the player in the blue jersey is in control of the ball and the player in the white jersey is defending.\nThe player in the blue jersey swings his stick, and the puck flies over the defense of the player in the white jersey and flies to the goal. Then the camera quickly rises and shoots the goalkeeper in the white jersey stretching his body to block the ball from a bird's-eye view, but the puck still flies into the goal successfully. Two players in blue jerseys and a player in a white jersey appear on the left side of the picture. The two slide past the goalkeeper, and the player in the blue jersey raises his arms and shouts in celebration.", "events": [ { "event": "At the beginning of the video, the camera is aimed at the ice hockey rink, where several players are playing. The audience seats in the picture are full of spectators, and there are billboards and stadium logos in the background. Players wearing blue jerseys and players wearing white jerseys are in fierce competition on the ice hockey rink. The camera moves backward and right, and the player in the blue jersey is in control of the ball and the player in the white jersey is defending.", "visual_elements": [ { "content": "The camera is aimed at the ice hockey rink.", "type": "camera", "weight": 3 }, { "content": "There are many players playing on the rink.", "type": "attribute", "weight": 3 }, { "content": "The audience seats in the picture are full of spectators.", "type": "scene", "weight": 2 }, { "content": "There are billboards and stadium logos in the background.", "type": "scene", "weight": 2 }, { "content": "The players in blue jerseys and the players in white jerseys are fighting fiercely on the ice hockey rink.", "type": "action", "weight": 3 }, { "content": "The camera moves backward and to the right.", "type": "camera", "weight": 3 }, { "content": "The player in the blue jersey is controlling the ball and the player in the white jersey is defending.", "type": "action", "weight": 3 } ] }, { "event": "The player in the blue jersey swings his stick, and the puck flies over the defense of the player in the white jersey and flies to the goal. Then the camera quickly rises and shoots the goalkeeper in the white jersey stretching his body to block the ball from a bird's-eye view, but the puck still flies into the goal successfully. Two players in blue jerseys and a player in a white jersey appear on the left side of the picture. 
The two slide past the goalkeeper, and the player in the blue jersey raises his arms and shouts in celebration.", "visual_elements": [ { "content": "The player in the blue jersey swings his stick.", "type": "action", "weight": 3 }, { "content": "The puck flies past the defense of the player in the white jersey and toward the goal.", "type": "action", "weight": 3 }, { "content": "The camera quickly lifts up.", "type": "camera", "weight": 3 }, { "content": "The goalkeeper in the white jersey stretches his body to block the ball from a bird's-eye view.", "type": "camera", "weight": 3 }, { "content": "The puck still flies successfully into the goal.", "type": "action", "weight": 3 }, { "content": "Two players in blue jerseys and a player in white jersey appear on the left side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The two slide past the goalkeeper.", "type": "action", "weight": 2 }, { "content": "The player in the blue jersey raises his arms and shouts in celebration.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0699", "video_path": "VIDGEN-1M/0E8a-U9nJfw-Scene-0055.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 26.86, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a middle-aged Caucasian man appears on the scene. He is wearing a blue sweater and khaki pants, carrying a gray backpack. To the man's left is a large screen displaying a clip of a rugby match. The man stands sideways on the left side of the screen, gripping the straps of his backpack tightly with both hands, his gaze fixed on the screen. Behind him, there is a large glass window through which industrial buildings can be seen outside.\nAs the video starts, the camera pans to the right, showing the players on the screen vigorously competing on the field. The man turns towards the camera and then turns sideways to look at the screen again. The screen switches to a scene where Player Number 10, wearing a green jersey, is quickly running with the ball, while Player Number 36, wearing a red jersey, is chasing him from behind.\nThe game footage on the scene focuses on Player Number 10. Number 10 runs out of the playing field, facing a fence covered with a black curtain, behind which are fans and spectators. Player Number 10 throws the rugby ball in front of the fence, and the people behind it eagerly bend down to pick it up. Number 10 then faces the stands and performs a celebratory dance, to which the audience cheers and waves their arms in excitement. The man beside the large screen lets his hands fall naturally to his sides, turns his body towards the camera, and his gaze remains fixed on the screen.\nThe man facing the camera mimics Player Number 10's celebratory dance, swinging his arms alternately to his sides. The screen in the background loops the footage of the players celebrating. In the foreground on the left side of the video, a pink table appears, with two black water cups and some papers placed on it.", "events": [ { "event": "At the beginning of the video, a middle-aged Caucasian man appears on the scene. He is wearing a blue sweater and khaki pants, carrying a gray backpack. To the man's left is a large screen displaying a clip of a rugby match. The man stands sideways on the left side of the screen, gripping the straps of his backpack tightly with both hands, his gaze fixed on the screen. 
Behind him, there is a large glass window through which industrial buildings can be seen outside.", "visual_elements": [ { "content": "In the scene, a middle-aged Caucasian man appears.", "type": "attribute", "weight": 3 }, { "content": "The man is wearing a blue sweater and khaki pants.", "type": "attribute", "weight": 2 }, { "content": "The man is carrying a gray backpack.", "type": "attribute", "weight": 2 }, { "content": "To the man's left is a large screen.", "type": "attribute", "weight": 3 }, { "content": "The screen is displaying a clip of a rugby match.", "type": "attribute", "weight": 3 }, { "content": "The man stands sideways on the left side of the screen, gripping the straps of his backpack tightly with both hands, his gaze fixed on the screen.", "type": "attribute", "weight": 3 }, { "content": "Behind the man, a large glass window can be seen.", "type": "scene", "weight": 2 }, { "content": "Through the window, industrial buildings can be seen outside.", "type": "scene", "weight": 2 } ] }, { "event": "As the video starts, the camera pans to the right, showing the players on the screen vigorously competing on the field. The man turns towards the camera and then turns sideways to look at the screen again. The screen switches to a scene where Player Number 10, wearing a green jersey, is quickly running with the ball, while Player Number 36, wearing a red jersey, is chasing him from behind.", "visual_elements": [ { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "The players on the screen are vigorously competing on the field.", "type": "action", "weight": 3 }, { "content": "The man turns towards the camera and then turns sideways to look at the screen again.", "type": "action", "weight": 3 }, { "content": "The screen switches to a scene where Player Number 10, wearing a green jersey, is quickly running with the ball.", "type": "scene", "weight": 3 }, { "content": "Player Number 36, wearing a red jersey, is chasing Player Number 10 from behind.", "type": "attribute", "weight": 2 } ] }, { "event": "The game footage on the scene focuses on Player Number 10. Number 10 runs out of the playing field, facing a fence covered with a black curtain, behind which are fans and spectators. Player Number 10 throws the rugby ball in front of the fence, and the people behind it eagerly bend down to pick it up. Number 10 then faces the stands and performs a celebratory dance, to which the audience cheers and waves their arms in excitement. 
The man beside the large screen lets his hands fall naturally to his sides, turns his body towards the camera, and his gaze remains fixed on the screen.", "visual_elements": [ { "content": "The game footage on the scene focuses on Player Number 10.", "type": "scene", "weight": 3 }, { "content": "Number 10 runs out of the playing field.", "type": "action", "weight": 3 }, { "content": "In front of Number 10 is a fence covered with a black curtain.", "type": "attribute", "weight": 2 }, { "content": "Behind the fence are fans and spectators.", "type": "attribute", "weight": 2 }, { "content": "Player Number 10 throws the rugby ball in front of the fence.", "type": "action", "weight": 3 }, { "content": "The people behind the fence eagerly bend down to pick it up.", "type": "action", "weight": 2 }, { "content": "Number 10 then faces the stands and performs a celebratory dance.", "type": "action", "weight": 3 }, { "content": "The audience cheers and waves their arms in excitement.", "type": "action", "weight": 3 }, { "content": "The man beside the large screen lets his hands fall naturally to his sides.", "type": "action", "weight": 1 }, { "content": "The man turns his body towards the camera, and his gaze remains fixed on the screen.", "type": "action", "weight": 1 } ] }, { "event": "The man facing the camera mimics Player Number 10's celebratory dance, swinging his arms alternately to his sides. The screen in the background loops the footage of the players celebrating. In the foreground on the left side of the video, a pink table appears, with two black water cups and some papers placed on it.", "visual_elements": [ { "content": "The man facing the camera mimics Player Number 10's celebratory dance.", "type": "action", "weight": 3 }, { "content": "The man is swinging his arms alternately to his sides.", "type": "action", "weight": 3 }, { "content": "The screen in the background loops the footage of the players celebrating.", "type": "scene", "weight": 2 }, { "content": "In the foreground on the left side of the video, a pink table appears.", "type": "scene", "weight": 1 }, { "content": "Two black water cups and some papers placed on the table.", "type": "attribute", "weight": 1 } ] } ], "n_events": 4, "n_elements": 28 }, { "index": "TUNA_0700", "video_path": "VIDGEN-1M/0KVeRUg60YU-Scene-0070.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 10.41, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, two male boxers are seen competing on a boxing ring. The boxer on the left is wearing red shorts, while the one on the right is wearing black shorts. Both are wearing boxing gloves and standing barefoot on the ring. The ring is surrounded by white ropes, and the ground is printed with words like \"GLORY 27 CHICAGO\". In the background, the audience seating and some advertising boards can be seen. The boxer in red shorts pushes the boxer in black shorts against the ropes on the right side of the ring, and both are engaging in a clinch. The referee stands in the center of the ring, observing their movements.\nThe camera slowly pans to the left, and the two boxers separate. The boxer in red shorts launches a series of punches, forcing the boxer in black shorts to retreat and defend. The two boxers circle from the right side of the ring to the left side. 
The referee constantly adjusts his position on the ring according to their rhythm.\nThe camera stops panning as the boxer in black shorts slips and falls to his knees. The referee quickly approaches the fallen boxer and gestures to pause the match, pushing the boxer in red shorts away from the fallen opponent. The boxer in red shorts turns and walks towards the center of the ring, facing away from his fallen rival. The camera pans backward as the boxer in red shorts walks towards it. In the background, the audience and photographers are all focused on the situation on the ring.", "events": [ { "event": "At the beginning of the video, two male boxers are seen competing on a boxing ring. The boxer on the left is wearing red shorts, while the one on the right is wearing black shorts. Both are wearing boxing gloves and standing barefoot on the ring. The ring is surrounded by white ropes, and the ground is printed with words like \"GLORY 27 CHICAGO\". In the background, the audience seating and some advertising boards can be seen. The boxer in red shorts pushes the boxer in black shorts against the ropes on the right side of the ring, and both are engaging in a clinch. The referee stands in the center of the ring, observing their movements.", "visual_elements": [ { "content": "Two male boxers are seen competing on a boxing ring.", "type": "attribute", "weight": 3 }, { "content": "The boxer on the left is wearing red shorts.", "type": "attribute", "weight": 3 }, { "content": "The boxer on the right is wearing black shorts.", "type": "attribute", "weight": 3 }, { "content": "Both are wearing boxing gloves and standing barefoot on the ring.", "type": "attribute", "weight": 3 }, { "content": "The ring is surrounded by white ropes.", "type": "attribute", "weight": 1 }, { "content": "The ground on the ring is printed with words like \"GLORY 27 CHICAGO\".", "type": "attribute", "weight": 1 }, { "content": "In the background, the audience seating and some advertising boards can be seen.", "type": "scene", "weight": 2 }, { "content": "The boxer in red shorts pushes the boxer in black shorts against the ropes on the right side of the ring.", "type": "action", "weight": 3 }, { "content": "Both are engaging in a clinch.", "type": "action", "weight": 2 }, { "content": "The referee stands in the center of the ring, observing their movements.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera slowly pans to the left, and the two boxers separate. The boxer in red shorts launches a series of punches, forcing the boxer in black shorts to retreat and defend. The two boxers circle from the right side of the ring to the left side. The referee constantly adjusts his position on the ring according to their rhythm.", "visual_elements": [ { "content": "The camera slowly pans to the left.", "type": "camera", "weight": 3 }, { "content": "The two boxers separate.", "type": "action", "weight": 3 }, { "content": "The boxer in red shorts launches a series of punches.", "type": "action", "weight": 3 }, { "content": "The boxer in black shorts is forced to retreat and defend.", "type": "action", "weight": 3 }, { "content": "The two boxers circle from the right side of the ring to the left side.", "type": "action", "weight": 3 }, { "content": "The referee constantly adjusts his position on the ring according to their rhythm.", "type": "action", "weight": 2 } ] }, { "event": "The camera stops panning as the boxer in black shorts slips and falls to his knees. 
The referee quickly approaches the fallen boxer and gestures to pause the match, pushing the boxer in red shorts away from the fallen opponent. The boxer in red shorts turns and walks towards the center of the ring, facing away from his fallen rival. The camera pans backward as the boxer in red shorts walks towards it. In the background, the audience and photographers are all focused on the situation on the ring.", "visual_elements": [ { "content": "The camera stops panning.", "type": "camera", "weight": 3 }, { "content": "The boxer in black shorts slips and falls to his knees.", "type": "action", "weight": 3 }, { "content": "The referee quickly approaches the fallen boxer in black shorts.", "type": "action", "weight": 3 }, { "content": "The referee makes a gesture to pause the match.", "type": "action", "weight": 2 }, { "content": "The referee is pushing the boxer in red shorts away from the boxer in black shorts.", "type": "action", "weight": 3 }, { "content": "The boxer in red shorts turns and walks towards the center of the ring, facing away from his fallen rival.", "type": "action", "weight": 3 }, { "content": "The camera pans backward.", "type": "camera", "weight": 3 }, { "content": "The boxer in red shorts walks towards the camera.", "type": "action", "weight": 3 }, { "content": "In the background, the audience and photographers are all focused on the situation on the ring.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 25 }, { "index": "TUNA_0701", "video_path": "VIDGEN-1M/0Mh33wQcAQY-Scene-0046.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 10.34, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a calm lake with clear water reflecting the blue sky and white clouds. In the distance is a dense forest with lush trees.\nThe camera moves to the right and the bow of a small boat appears at the bottom of the frame, with some water stains and signs of wear and tear on the bow. A hand appears in the frame, pointing in a certain direction on the lake. The camera moves to the left and a reflection in the calm water of the lake is clearly visible.", "events": [ { "event": "The video begins with the camera focusing on a calm lake with clear water reflecting the blue sky and white clouds. In the distance is a dense forest with lush trees.", "visual_elements": [ { "content": "The camera focuses on a calm lake.", "type": "camera", "weight": 3 }, { "content": "The clear water reflects the blue sky and white clouds.", "type": "scene", "weight": 2 }, { "content": "In the distance is a dense forest.", "type": "scene", "weight": 2 }, { "content": "The trees are lush and green.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves to the right and the bow of a small boat appears at the bottom of the frame, with some water stains and signs of wear and tear on the bow. A hand appears in the frame, pointing in a certain direction on the lake. 
The camera moves to the left and a reflection in the calm water of the lake is clearly visible.", "visual_elements": [ { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The bow of a small boat appears at the bottom of the frame.", "type": "scene", "weight": 2 }, { "content": "There are some water stains and wear marks on the bow.", "type": "attribute", "weight": 2 }, { "content": "A hand appears in the frame.", "type": "attribute", "weight": 2 }, { "content": "The hand is pointing in the direction of the lake.", "type": "action", "weight": 1 }, { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The reflection in the calm water of the lake is clearly visible.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0702", "video_path": "VIDGEN-1M/0a5aapSdz-M-Scene-0028.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Scene,Multi-Subject", "domain": "Sports Activity", "duration": 4.74, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, four cyclists appear in the center of the picture. They ride towards the camera from a distance, and the camera also moves in. The background is a vast expanse of dry cracked ground, with mountains and a gradient sky in the distance. The sky with a few white clouds presents a lavender and pink sunset glow.\nThe camera switches to a side view. The fastest rider is wearing a black and gray short-sleeved shirt, gray shorts, and a white helmet; the second rider is wearing a black riding suit and a black helmet; the third rider is wearing a green short-sleeved shirt, black shorts, and a green helmet; the fourth rider is wearing a pink short-sleeved shirt, black shorts, and a blue helmet. The camera follows the direction of the four riders.\nNext, the camera switches to the left front view of the four riders again. The four riders continue to ride. The rider wearing a black and gray short-sleeved shirt and gray shorts looks up and looks forward, while the other three riders look down and look at the ground.", "events": [ { "event": "At the beginning of the video, four cyclists appear in the center of the picture. They ride towards the camera from a distance, and the camera also moves in. The background is a vast expanse of dry cracked ground, with mountains and a gradient sky in the distance. The sky with a few white clouds presents a lavender and pink sunset glow.", "visual_elements": [ { "content": "Four cyclists appear in the center of the screen.", "type": "attribute", "weight": 3 }, { "content": "The four people ride towards the camera from a distance.", "type": "action", "weight": 3 }, { "content": "The camera moves closer.", "type": "camera", "weight": 3 }, { "content": "The background is a vast dry and cracked ground.", "type": "scene", "weight": 3 }, { "content": "In the distance are continuous mountains and a gradient sky.", "type": "scene", "weight": 2 }, { "content": "The sky with a few white clouds presents a lavender and pink sunset glow.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to a side view. The fastest rider is wearing a black and gray short-sleeved shirt, gray shorts, and a white helmet; the second rider is wearing a black riding suit and a black helmet; the third rider is wearing a green short-sleeved shirt, black shorts, and a green helmet; the fourth rider is wearing a pink short-sleeved shirt, black shorts, and a blue helmet. 
The camera follows the direction of the four riders.", "visual_elements": [ { "content": "The camera switches to a side view.", "type": "camera", "weight": 3 }, { "content": "The fastest cyclist is wearing a black and gray short-sleeved shirt, gray shorts, and a white helmet.", "type": "attribute", "weight": 3 }, { "content": "The second cyclist is wearing a black cycling suit and a black helmet.", "type": "attribute", "weight": 3 }, { "content": "The third cyclist is wearing a green short-sleeved shirt, black shorts, and a green helmet.", "type": "attribute", "weight": 3 }, { "content": "The fourth cyclist is wearing a pink short-sleeved shirt, black shorts, and a blue helmet.", "type": "attribute", "weight": 3 }, { "content": "The camera moves in the direction of the four people's riding.", "type": "camera", "weight": 3 } ] }, { "event": "Next, the camera switches to the left front view of the four riders again. The four riders continue to ride. The rider wearing a black and gray short-sleeved shirt and gray shorts looks up and looks forward, while the other three riders look down and look at the ground.", "visual_elements": [ { "content": "The camera switches to the left front view of the four people.", "type": "camera", "weight": 3 }, { "content": "The four cyclists continue to ride.", "type": "action", "weight": 2 }, { "content": "The cyclist wearing a black and gray short-sleeved shirt and gray shorts looks up and looks forward.", "type": "attribute", "weight": 2 }, { "content": "The other three look down and look at the ground.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0703", "video_path": "VIDGEN-1M/0gZ8PtqED1o-Scene-0028.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 9.43, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a basketball court surrounded by seated spectators and brightly lit. In the scene, a player wearing a red jersey (with the number 34 on the back) is jumping high in the air, holding the ball with both hands ready to shoot. At the same time, several players wearing white jerseys are defending around him.\nThe camera pans upwards as Player Number 34 in the red jersey shoots the ball towards the basket. Player Number 40 in the white jersey stretches out his right arm attempting to block the shot. The ball arcs through the air but ultimately fails to go through the basket, bouncing back instead. Player Number 40 in the white jersey lowers his raised right arm.\nThe camera pans to the left as Player Number 34 in the red jersey successfully grabs the rebounding basketball and takes another shot. The ball arcs through the air once again and finally goes through the basket. The other players in white jerseys surround the basket, watching this moment.", "events": [ { "event": "At the beginning of the video, the camera focuses on a basketball court surrounded by seated spectators and brightly lit. In the scene, a player wearing a red jersey (with the number 34 on the back) is jumping high in the air, holding the ball with both hands ready to shoot. 
At the same time, several players wearing white jerseys are defending around him.", "visual_elements": [ { "content": "The camera focuses on a basketball court.", "type": "camera", "weight": 3 }, { "content": "The basketball court is surrounded by seated spectators.", "type": "scene", "weight": 2 }, { "content": "The lighting is bright.", "type": "scene", "weight": 2 }, { "content": "In the scene, there is a player wearing a red jersey (with the number 34 on the back).", "type": "attribute", "weight": 3 }, { "content": "Player Number 34 in the red jersey is jumping high in the air, holding the ball with both hands ready to shoot.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans upwards as Player Number 34 in the red jersey shoots the ball towards the basket. Player Number 40 in the white jersey stretches out his right arm attempting to block the shot. The ball arcs through the air but ultimately fails to go through the basket, bouncing back instead. Player Number 40 in the white jersey lowers his raised right arm.", "visual_elements": [ { "content": "Several players wearing white jerseys are defending around him.", "type": "attribute", "weight": 3 }, { "content": "The camera pans upwards.", "type": "camera", "weight": 3 }, { "content": "Player Number 34 in the red jersey shoots the ball towards the basket.", "type": "action", "weight": 3 }, { "content": "Player Number 40 in the white jersey stretches out his right arm attempting to block the shot.", "type": "action", "weight": 3 }, { "content": "The ball arcs through the air but ultimately fails to go through the basket, bouncing back instead.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans to the left as Player Number 34 in the red jersey successfully grabs the rebounding basketball and takes another shot. The ball arcs through the air once again and finally goes through the basket. The other players in white jerseys surround the basket, watching this moment.", "visual_elements": [ { "content": "Player Number 40 in the white jersey lowers his raised right arm.", "type": "action", "weight": 2 }, { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "Player Number 34 in the red jersey successfully grabs the rebounding basketball.", "type": "action", "weight": 3 }, { "content": "Player Number 34 takes another shot.", "type": "action", "weight": 3 }, { "content": "The ball arcs through the air once again and finally goes through the basket.", "type": "action", "weight": 2 }, { "content": "The other players in white jerseys surround the basket, watching this moment.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0704", "video_path": "VIDGEN-1M/0jG0Vw0SC3w-Scene-0047.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 4.21, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a wet street. In the night scene, the trees on both sides of the street are adorned with twinkling holiday lights. Several cars can be seen slowly driving down the street, with their headlights intertwining with the colorful lights on the street. On the right side of the street stands a building with white columns.\nThe camera rotates to the left, revealing white window scenes of the building and black fences connecting the white columns in front of the door.", "events": [ { "event": "At the beginning of the video, the camera focuses on a wet street. 
In the night scene, the trees on both sides of the street are adorned with twinkling holiday lights. Several cars can be seen slowly driving down the street, with their headlights intertwining with the colorful lights on the street. On the right side of the street stands a building with white columns.", "visual_elements": [ { "content": "The camera focuses on a wet street.", "type": "camera", "weight": 3 }, { "content": "The scene is set at night.", "type": "scene", "weight": 2 }, { "content": "The trees on both sides of the street are adorned with twinkling holiday lights.", "type": "attribute", "weight": 2 }, { "content": "Several cars slowly drive down the street.", "type": "action", "weight": 2 }, { "content": "The headlights of the cars intertwine with the colorful street lights.", "type": "attribute", "weight": 1 }, { "content": "On the right side of the street stands a building with white columns.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera rotates to the left, revealing white window scenes of the building and black fences connecting the white columns in front of the door.", "visual_elements": [ { "content": "The camera rotates to the left.", "type": "camera", "weight": 3 }, { "content": "The window scenes of the building are white.", "type": "attribute", "weight": 1 }, { "content": "Black fences connect the white columns in front of the door.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0705", "video_path": "VIDGEN-1M/0kk24-ZNIkI-Scene-0041.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 13.9, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the foredeck of a ship. A young woman stands on the deck, dressed in a light-colored sports vest and dark shorts. She holds a fishing pole with both hands, leaning slightly forward. The backdrop is the vast sea, with the distant horizon where the sea meets the dazzling sun. A few small boats dot the surface of the water, and rocky mountain peaks can be seen in the distance.\nThe camera shakes up and down as a man dressed in a blue long-sleeved shirt and blue shorts enters from the right side of the scene. He is holding a fishing pole and first walks to the right side of the woman, looking down at the sea. There is also a camera and the side face of a man visible on the right side of the scene.\nThe camera cuts to show the man holding the fishing pole walking to the left side of the woman. The camera pans backward, revealing a man in a light-colored long-sleeved shirt on the right side of the scene, with a camera hanging around his neck. A green sunshade cloth on the ship enters the scene from above.", "events": [ { "event": "At the beginning of the video, the camera focuses on the foredeck of a ship. A young woman stands on the deck, dressed in a light-colored sports vest and dark shorts. She holds a fishing pole with both hands, leaning slightly forward. The backdrop is the vast sea, with the distant horizon where the sea meets the dazzling sun. 
A few small boats dot the surface of the water, and rocky mountain peaks can be seen in the distance.", "visual_elements": [ { "content": "The camera focuses on the foredeck of a ship.", "type": "camera", "weight": 3 }, { "content": "A young woman stands on the deck.", "type": "attribute", "weight": 3 }, { "content": "The woman is dressed in a light-colored sports vest and dark shorts.", "type": "attribute", "weight": 3 }, { "content": "The woman holds a fishing pole with both hands, leaning slightly forward.", "type": "attribute", "weight": 3 }, { "content": "The backdrop is the vast sea, with the distant horizon.", "type": "scene", "weight": 2 }, { "content": "The distant sea meets the dazzling sun.", "type": "scene", "weight": 2 }, { "content": "A few small boats dot the surface of the water.", "type": "scene", "weight": 2 }, { "content": "Rocky mountain peaks can be seen in the distance.", "type": "scene", "weight": 2 } ] }, { "event": "The camera shakes up and down as a man dressed in a blue long-sleeved shirt and blue shorts enters from the right side of the scene. He is holding a fishing pole and first walks to the right side of the woman, looking down at the sea. There is also a camera and the side face of a man visible on the right side of the scene.", "visual_elements": [ { "content": "The camera shakes up and down.", "type": "camera", "weight": 3 }, { "content": "A man dressed in a blue long-sleeved shirt and blue shorts enters from the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The man is holding a fishing pole.", "type": "attribute", "weight": 2 }, { "content": "The man first walks to the right side of the woman, looking down at the sea.", "type": "action", "weight": 3 }, { "content": "There is also a camera and the side face of a man visible on the right side of the scene.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to show the man holding the fishing pole walking to the left side of the woman. The camera pans backward, revealing a man in a light-colored long-sleeved shirt on the right side of the scene, with a camera hanging around his neck. A green sunshade cloth on the ship enters the scene from above.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The man holding the fishing pole walks to the left side of the woman.", "type": "action", "weight": 3 }, { "content": "The camera pans backward.", "type": "camera", "weight": 3 }, { "content": "A man in a light-colored long-sleeved shirt is on the right side of the scene.", "type": "attribute", "weight": 3 }, { "content": "A camera is hanging around the man's neck.", "type": "attribute", "weight": 2 }, { "content": "A green sunshade cloth on the ship enters the scene from above.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0706", "video_path": "VIDGEN-1M/0mf3Iv3iUs0-Scene-0004.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 17.06, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a young white man standing in the airport's rear hall. The man is facing the French window with his back to the camera. The man is wearing a gray hoodie with black sleeves, a white baseball cap worn backwards on his head, and a black backpack. The man holds a passport and boarding pass in his left hand and pulls a black suitcase with his right hand. 
There are parked planes and a snow-covered airport runway outside the window. The floor of the terminal hall is dark, and the walls and ceiling are white. There is a barrier on the left side of the screen and an elevator on the right side of the screen.\nThen, the camera moves toward the man, changing from a long shot to a medium shot. In the picture, the man's image becomes clearer. There are red letters on the baseball cap on the man's head. The man turns sideways to the camera and points his right hand out the window while talking. There are some equipment and small red lights on the runway in the background. There is a black street lamp on the platform outside the window facing the man.\nFinally, the camera pans slightly to the right around the man. The man gestures with his hands while talking, and occasionally turns to face the window. There is an elevator inside the isolation zone in the waiting hall on the left side of the screen. The camera shakes throughout the video.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young white man standing in the airport's rear hall. The man is facing the French window with his back to the camera. The man is wearing a gray hoodie with black sleeves, a white baseball cap worn backwards on his head, and a black backpack. The man holds a passport and boarding pass in his left hand and pulls a black suitcase with his right hand. There are parked planes and a snow-covered airport runway outside the window. The floor of the terminal hall is dark, and the walls and ceiling are white. There is a barrier on the left side of the screen and an elevator on the right side of the screen.", "visual_elements": [ { "content": "The camera is focused on a young white male standing in the airport's rear hall.", "type": "camera", "weight": 3 }, { "content": "The man has his back to the camera and is facing the floor-to-ceiling window.", "type": "attribute", "weight": 3 }, { "content": "The man is wearing a gray hoodie with black sleeves, a white baseball cap worn backwards, and a black backpack.", "type": "attribute", "weight": 2 }, { "content": "The man is holding a passport and boarding pass in his left hand.", "type": "attribute", "weight": 1 }, { "content": "He is pulling a black suitcase with his right hand.", "type": "attribute", "weight": 1 }, { "content": "There are parked planes and a snow-covered airport runway outside the window.", "type": "scene", "weight": 3 }, { "content": "The floor in the terminal is dark.", "type": "scene", "weight": 2 }, { "content": "The walls and ceiling are white.", "type": "scene", "weight": 1 }, { "content": "There is a barrier on the left side of the screen.", "type": "scene", "weight": 1 }, { "content": "There is an elevator on the right side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves toward the man, changing from a long shot to a medium shot. In the picture, the man's image becomes clearer. There are red letters on the baseball cap on the man's head. The man turns sideways to the camera and points his right hand out the window while talking. There are some equipment and small red lights on the runway in the background. 
There is a black street lamp on the platform outside the window facing the man.", "visual_elements": [ { "content": "Then, the camera moves toward the man.", "type": "camera", "weight": 3 }, { "content": "The camera changes from a long shot to a medium shot.", "type": "camera", "weight": 3 }, { "content": "In the picture, the man's image is clearer.", "type": "attribute", "weight": 2 }, { "content": "There are red letters on the baseball cap on the man's head.", "type": "attribute", "weight": 1 }, { "content": "The man turns sideways to the camera and points his right hand out the window while talking.", "type": "action", "weight": 3 }, { "content": "There are some equipment and small red lights on the airport runway in the background.", "type": "scene", "weight": 1 }, { "content": "There is a black street lamp on the platform outside the window facing the man.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera pans slightly to the right around the man. The man gestures with his hands while talking, and occasionally turns to face the window. There is an elevator inside the isolation zone in the waiting hall on the left side of the screen. The camera shakes throughout the video.", "visual_elements": [ { "content": "Finally, the camera pans slightly to the right around the man.", "type": "camera", "weight": 3 }, { "content": "The man gestures with both hands while talking.", "type": "action", "weight": 2 }, { "content": "The man occasionally turns to face the window.", "type": "action", "weight": 1 }, { "content": "There is an elevator in the barrier in the terminal hall on the left side of the screen.", "type": "scene", "weight": 1 }, { "content": "The camera shakes throughout the video.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0707", "video_path": "VIDGEN-1M/0n0vg5LXXXo-Scene-0045.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 11.98, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera focuses on a rushing stream of water with white waves on the surface, the camera moves upwards to reveal rocky cliffs on either side, the rocks covered with green vegetation. The water rushes out from the gaps between the rocks, forming a spectacular waterfall.\nThe camera continues to move upward, settling on a panoramic view of the waterfall, which cascades down from a high point emitting water spray.", "events": [ { "event": "The camera focuses on a rushing stream of water with white waves on the surface, the camera moves upwards to reveal rocky cliffs on either side, the rocks covered with green vegetation. 
The water rushes out from the gaps between the rocks, forming a spectacular waterfall.", "visual_elements": [ { "content": "The camera focuses on a fast-flowing stream.", "type": "camera", "weight": 3 }, { "content": "A white wave rises above the water.", "type": "scene", "weight": 3 }, { "content": "The camera moves up to reveal rocky cliffs on either side.", "type": "camera", "weight": 3 }, { "content": "The rocks are covered with green vegetation.", "type": "attribute", "weight": 2 }, { "content": "The water rushes through the gaps between the rocks, creating a spectacular waterfall.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move upward, settling on a panoramic view of the waterfall, which cascades down from a high point emitting water spray.", "visual_elements": [ { "content": "The camera continues to move upward.", "type": "camera", "weight": 3 }, { "content": "The camera settles on a panoramic view of the waterfall.", "type": "camera", "weight": 3 }, { "content": "The waterfall cascades down from a high point, emitting a mist of water.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0708", "video_path": "VIDGEN-1M/14PgjuJn1J8-Scene-0091.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Sports Activity", "duration": 15.08, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a skier dressed in a red top and black pants. The skier is wearing a black helmet and yellow gloves, and is adjusting the helmet. The backdrop is a snowy landscape with a sparse forest on it.\nThe camera cuts to a scene of the skier beginning to glide. The skier stands on the skis, leaning slightly forward with hands hanging naturally. The skier glides forward along the snow track, with rolling mountains visible in the distance. The camera follows the skier, staying at the skier's side and slightly behind.\nAs the skier reaches the first slope, he lightly jumps into the air, spinning twice before landing. Upon landing, the skier loses balance briefly, touching the ground with their hands for support before straightening up and continuing to glide forward. Soon after, the skier reaches the second slope and jumps again, completing a rotation in the air before landing smoothly and continuing their glide.", "events": [ { "event": "At the beginning of the video, the camera focuses on a skier dressed in a red top and black pants. The skier is wearing a black helmet and yellow gloves, and is adjusting the helmet. The backdrop is a snowy landscape with a sparse forest on it.", "visual_elements": [ { "content": "The camera focuses on a skier dressed in a red top and black pants.", "type": "camera", "weight": 3 }, { "content": "The skier is wearing a black helmet and yellow gloves.", "type": "attribute", "weight": 3 }, { "content": "The skier is adjusting the helmet.", "type": "action", "weight": 3 }, { "content": "The backdrop is a snowy landscape.", "type": "scene", "weight": 2 }, { "content": "There is a sparse forest on the snowy landscape.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a scene of the skier beginning to glide. The skier stands on the skis, leaning slightly forward with hands hanging naturally. The skier glides forward along the snow track, with rolling mountains visible in the distance. 
The camera follows the skier, staying at the skier's side and slightly behind.", "visual_elements": [ { "content": "The camera cuts to a scene of the skier beginning to glide.", "type": "camera", "weight": 3 }, { "content": "The skier stands on the skis.", "type": "attribute", "weight": 3 }, { "content": "The skier is leaning slightly forward with hands hanging naturally.", "type": "attribute", "weight": 3 }, { "content": "The skier glides forward along the snow track.", "type": "action", "weight": 3 }, { "content": "Rolling mountains can be seen in the distance.", "type": "scene", "weight": 2 }, { "content": "The camera follows the skier, staying at the skier's side and slightly behind.", "type": "camera", "weight": 3 } ] }, { "event": "As the skier reaches the first slope, he lightly jumps into the air, spinning twice before landing. Upon landing, the skier loses balance briefly, touching the ground with their hands for support before straightening up and continuing to glide forward. Soon after, the skier reaches the second slope and jumps again, completing a rotation in the air before landing smoothly and continuing their glide.", "visual_elements": [ { "content": "The skier reaches the first slope.", "type": "action", "weight": 1 }, { "content": "The skier lightly jumps into the air, spinning twice before landing.", "type": "action", "weight": 3 }, { "content": "Upon landing, the skier loses balance briefly.", "type": "action", "weight": 2 }, { "content": "The skier touches the ground with their hands for support before straightening up and continuing to glide forward.", "type": "action", "weight": 3 }, { "content": "The skier reaches the second slope.", "type": "action", "weight": 1 }, { "content": "The skier jumps again, completing a rotation in the air.", "type": "action", "weight": 3 }, { "content": "The skier lands smoothly and continues their glide.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0709", "video_path": "VIDGEN-1M/1QS1EzGD1Mk-Scene-0011.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 5.8, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on the side of a tank, which is traveling through the desert. The tracked portion of the tank is clearly visible. There is a distinctive white five-pointed star logo on the tank's body.\nThe camera follows the tank's direction of travel as it slowly moves to the right, with green palm trees and low bushes visible in the background, and rolling sand dunes and blue sky in the distance.\nThe camera continues to move slowly to the right following the tank's direction of advance, and the camera gradually shifts to the right front of the tank, showing the long straightened barrel of the tank and more details of the tank. The tracks run over the sand in a shower of black dust.", "events": [ { "event": "The video begins with the camera focusing on the side of a tank, which is traveling through the desert. The tracked portion of the tank is clearly visible. 
There is a distinctive white five-pointed star logo on the tank's body.", "visual_elements": [ { "content": "The camera is on the side of a tank.", "type": "camera", "weight": 3 }, { "content": "The tank is traveling through the desert.", "type": "action", "weight": 3 }, { "content": "The tracks of the tank are clearly visible.", "type": "attribute", "weight": 2 }, { "content": "There is a distinctive white five-pointed star on the tank's body.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera follows the tank's direction of travel as it slowly moves to the right, with green palm trees and low bushes visible in the background, and rolling sand dunes and blue sky in the distance.", "visual_elements": [ { "content": "The camera follows the tank's direction of travel as it slowly moves to the right.", "type": "camera", "weight": 3 }, { "content": "In the background, green palm trees and low bushes can be seen.", "type": "scene", "weight": 2 }, { "content": "In the distance are rolling sand dunes and a blue sky.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move slowly to the right following the tank's direction of advance, and the camera gradually shifts to the right front of the tank, showing the long straightened barrel of the tank and more details of the tank. The tracks run over the sand in a shower of black dust.", "visual_elements": [ { "content": "The camera continues to move slowly to the right, following the direction of the tank.", "type": "camera", "weight": 3 }, { "content": "The camera gradually shifts to the right front of the tank.", "type": "camera", "weight": 3 }, { "content": "The image shows the long, straightened barrel of the tank and more details of the tank.", "type": "attribute", "weight": 3 }, { "content": "The tracks run over the sand, sending up a cloud of black dust.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0710", "video_path": "VIDGEN-1M/1j3fvYmjE30-Scene-0152.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 4.07, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an indoor skateboarding park. The venue is spacious with a smooth floor, and the walls are adorned with skateboards of various colors and designs. On the left side of the skateboarding park, there are small ramps and a set of stairs. In the middle, there is a larger platform where a person dressed in all black stands. On the wall behind the scene, there is a black circular logo on the right and a slogan with a black background and white letters on the left.\nA male skateboarder wearing a gray T-shirt and light-colored pants appears on the right side of the scene. He is wearing a white hat and has a skateboard under his feet as he glides forward, gradually approaching the camera. The camera follows his movements and rotates to the left. After skating over a small ramp, the skateboarder quickly jumps into the air, flipping the skateboard in mid-air. As a result, the skateboard lands with its wheels facing upwards. The skateboarder loses balance and steps away from the skateboard, taking a few steps forward.\nThe camera continues to rotate to the left, revealing two males on the left side of the platform. One male, wearing a black wool cap and a gray short-sleeved shirt, is seated at the edge of the platform. 
Behind the male in the gray short-sleeved shirt, another male stands and is looking down at an electronic device he is holding.", "events": [ { "event": "At the beginning of the video, the camera focuses on an indoor skateboarding park. The venue is spacious with a smooth floor, and the walls are adorned with skateboards of various colors and designs. On the left side of the skateboarding park, there are small ramps and a set of stairs. In the middle, there is a larger platform where a person dressed in all black stands. On the wall behind the scene, there is a black circular logo on the right and a slogan with a black background and white letters on the left.", "visual_elements": [ { "content": "The camera focuses on an indoor skateboarding park.", "type": "camera", "weight": 3 }, { "content": "The venue is spacious with a smooth floor.", "type": "attribute", "weight": 2 }, { "content": "The walls are adorned with skateboards of various colors and designs.", "type": "scene", "weight": 2 }, { "content": "On the left side of the skateboarding park, there are small ramps and a set of stairs.", "type": "attribute", "weight": 2 }, { "content": "In the middle, there is a larger platform.", "type": "attribute", "weight": 3 }, { "content": "A person dressed in all black stands on the platform.", "type": "attribute", "weight": 2 }, { "content": "On the wall behind the scene, there is a black circular logo on the right.", "type": "scene", "weight": 2 }, { "content": "On the wall behind the scene, there is a slogan with a black background and white letters on the left.", "type": "scene", "weight": 2 } ] }, { "event": "A male skateboarder wearing a gray T-shirt and light-colored pants appears on the right side of the scene. He is wearing a white hat and has a skateboard under his feet as he glides forward, gradually approaching the camera. The camera follows his movements and rotates to the left. After skating over a small ramp, the skateboarder quickly jumps into the air, flipping the skateboard in mid-air. As a result, the skateboard lands with its wheels facing upwards. The skateboarder loses balance and steps away from the skateboard, taking a few steps forward.", "visual_elements": [ { "content": "A male skateboarder wearing a gray T-shirt and light-colored pants appears on the right side of the scene.", "type": "attribute", "weight": 3 }, { "content": "He is wearing a white hat and has a skateboard under his feet.", "type": "attribute", "weight": 3 }, { "content": "A skateboarder glides forward, gradually approaching the camera.", "type": "action", "weight": 3 }, { "content": "The camera follows his movements and rotates to the left.", "type": "camera", "weight": 3 }, { "content": "After skating over a small ramp, the skateboarder quickly jumps into the air, flipping the skateboard in mid-air.", "type": "action", "weight": 3 }, { "content": "As a result, the skateboard lands with its wheels facing upwards.", "type": "action", "weight": 3 }, { "content": "The skateboarder loses balance and steps away from the skateboard, taking a few steps forward.", "type": "action", "weight": 3 } ] }, { "event": "The camera continues to rotate to the left, revealing two males on the left side of the platform. One male, wearing a black wool cap and a gray short-sleeved shirt, is seated at the edge of the platform. 
Behind the male in the gray short-sleeved shirt, another male stands and is looking down at an electronic device he is holding.", "visual_elements": [ { "content": "The camera continues to rotate to the left.", "type": "camera", "weight": 3 }, { "content": "Two males on the left side of the platform.", "type": "attribute", "weight": 2 }, { "content": "One male, wearing a black wool cap and a gray short-sleeved shirt, is seated at the edge of the platform.", "type": "attribute", "weight": 2 }, { "content": "Behind the male in the gray short-sleeved shirt, another male stands and is looking down at an electronic device he is holding.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0711", "video_path": "VIDGEN-1M/26K6xY31x5M-Scene-0084.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 5.71, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a brown and white puppy. The puppy's body is mainly brown, with noticeable white hair on its back. The puppy is lying on a white, wrinkled sheet. The puppy's head is facing the right side of the frame, sniffing something. There is hair of another white animal on the right side of the frame.\nThe camera moves slightly to the lower left, revealing the dog's underside. The dog moves backwards, pushing a white toy with its mouth.\nThe camera is slightly tilted upward. The dog picks up the toy and stands up and walks to the right of the screen. The camera remains above the dog.", "events": [ { "event": "The video begins with the camera focused on a brown and white puppy. The puppy's body is mainly brown, with noticeable white hair on its back. The puppy is lying on a white, wrinkled sheet. The puppy's head is facing the right side of the frame, sniffing something. There is hair of another white animal on the right side of the frame.", "visual_elements": [ { "content": "The camera is focused on a brown and white puppy.", "type": "camera", "weight": 3 }, { "content": "The puppy's body is mainly brown, with noticeable white hair on its back.", "type": "attribute", "weight": 2 }, { "content": "The puppy is lying on a white, wrinkled sheet.", "type": "attribute", "weight": 2 }, { "content": "The puppy's head is facing the right side of the frame, sniffing something.", "type": "action", "weight": 3 }, { "content": "There is hair of another white animal on the right side of the frame.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera moves slightly to the lower left, revealing the dog's underside. The dog moves backwards, pushing a white toy with its mouth.", "visual_elements": [ { "content": "The camera moves slightly to the lower left.", "type": "camera", "weight": 3 }, { "content": "The object under the dog is revealed.", "type": "attribute", "weight": 2 }, { "content": "The dog moves back and plays with a white toy by pushing it with its mouth.", "type": "action", "weight": 3 } ] }, { "event": "The camera is slightly tilted upward. The dog picks up the toy and stands up and walks to the right of the screen. 
The camera remains above the dog.", "visual_elements": [ { "content": "The camera moves slightly upward.", "type": "camera", "weight": 3 }, { "content": "The dog picks up the toy and stands up and walks to the right side of the frame.", "type": "action", "weight": 3 }, { "content": "The camera remains above the dog.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0712", "video_path": "VIDGEN-1M/2UdLi3nualQ-Scene-0012.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 11.54, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a women's basketball game on the basketball court. One team is wearing white jerseys, and the other team is wearing orange jerseys. The camera moves to the left, and the players of the white team are holding their arms high in defense in front of the referee's seat, while the players of the orange team are patting the basketball to advance. There are referee seats and substitutes' seats on the side of the basketball court, and the coaches and substitutes are sitting on the substitutes' seats.\nWhen the orange team players are patting the basketball against the white team players, another white team player runs from the right side of the screen and grabs the basketball. The camera moves to the right with the white team player, and she pats the ball and runs to the orange team's goal to shoot and successfully shoots. The players on the court rush forward, and the substitutes cheer for the team on the bench. After the white team player scores, he then hits the basketball hoop again, and the ball falls into the hands of the orange team player. The game continues, and the orange team player is about to serve, and the other players stand apart to prepare to grab the ball.", "events": [ { "event": "At the beginning of the video, the camera is focused on a women's basketball game on the basketball court. One team is wearing white jerseys, and the other team is wearing orange jerseys. The camera moves to the left, and the players of the white team are holding their arms high in defense in front of the referee's seat, while the players of the orange team are patting the basketball to advance. 
There are referee seats and substitutes' seats on the side of the basketball court, and the coaches and substitutes are sitting on the substitutes' seats.", "visual_elements": [ { "content": "The camera is focused on a women's basketball game on the basketball court.", "type": "camera", "weight": 3 }, { "content": "One team is wearing white jerseys.", "type": "attribute", "weight": 2 }, { "content": "The other team is wearing orange jerseys.", "type": "attribute", "weight": 2 }, { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The players of the white team are raising their arms to defend in front of the referee's seat.", "type": "action", "weight": 3 }, { "content": "The players of the orange team are dribbling the basketball to advance.", "type": "action", "weight": 3 }, { "content": "There are referee's seats and substitutes' seats on the side of the basketball court.", "type": "scene", "weight": 2 }, { "content": "The coach and substitutes are sitting on the bench.", "type": "attribute", "weight": 2 } ] }, { "event": "When the orange team players are patting the basketball against the white team players, another white team player runs from the right side of the screen and grabs the basketball. The camera moves to the right with the white team player, and she pats the ball and runs to the orange team's goal to shoot and successfully shoots. The players on the court rush forward, and the substitutes cheer for the team on the bench. After the white team player scores, he then hits the basketball hoop again, and the ball falls into the hands of the orange team player. The game continues, and the orange team player is about to serve, and the other players stand apart to prepare to grab the ball.", "visual_elements": [ { "content": "When the orange team player is dribbling the basketball against the white team player, another white team player runs from the right side of the screen and snatches the basketball.", "type": "action", "weight": 2 }, { "content": "The camera moves to the right with the white team player.", "type": "camera", "weight": 3 }, { "content": "The white team player dribbles the ball and runs to the orange team's basket to shoot and successfully makes the shot.", "type": "action", "weight": 3 }, { "content": "The players on the court rush forward.", "type": "attribute", "weight": 1 }, { "content": "The substitute players cheer for the team on the bench.", "type": "attribute", "weight": 1 }, { "content": "The white team player makes the shot and then shoots at the basketball hoop again.", "type": "action", "weight": 3 }, { "content": "The ball falls into the hands of the orange team player.", "type": "action", "weight": 2 }, { "content": "The orange team player is about to serve the ball.", "type": "action", "weight": 2 }, { "content": "The other players stand apart to prepare to grab the ball.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0713", "video_path": "VIDGEN-1M/2YOR3X2xikQ-Scene-0157.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 11.93, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on three middle-aged men sitting on the ground. In the background is a large aquarium with various fish swimming in it, some large and some small, in different colors, such as white, red, and black. The background of the aquarium is blue water with some water plants in it. 
The man on the left side of the screen is wearing a black jacket, the man on the right is wearing a gray T-shirt, and the man in the middle is wearing a red baseball cap and a black T-shirt with a circular pattern on it. On the right side of the screen, behind the men, there is a green bottle, a cattail mat, and a white wire on the ground.\nThen, the camera moves to the right. The man on the left side of the screen is talking, while the men in the middle and right side are sitting cross-legged, listening to the man on the left side of the screen talking.\nThen, the camera moves to the left and focuses on the man on the far left of the screen. The man points to the man in the middle with his right hand while talking. The man on the right of the screen disappears from the camera.\nFinally, the camera moves back, distancing itself from the man on the left side of the frame.", "events": [ { "event": "At the beginning of the video, the camera is focused on three middle-aged men sitting on the ground. In the background is a large aquarium with various fish swimming in it, some large and some small, in different colors, such as white, red, and black. The background of the aquarium is blue water with some water plants in it. The man on the left side of the screen is wearing a black jacket, the man on the right is wearing a gray T-shirt, and the man in the middle is wearing a red baseball cap and a black T-shirt with a circular pattern on it. On the right side of the screen, behind the men, there is a green bottle, a cattail mat, and a white wire on the ground.", "visual_elements": [ { "content": "The camera is focused on three middle-aged men sitting on the ground.", "type": "camera", "weight": 3 }, { "content": "The background is a large aquarium with various fish swimming in it.", "type": "scene", "weight": 3 }, { "content": "The fish are big and small, and of different colors, including white, red, black, etc.", "type": "scene", "weight": 2 }, { "content": "The background of the aquarium is blue water with some water plants in it.", "type": "scene", "weight": 1 }, { "content": "The man on the left side of the screen is wearing a black jacket, and the man on the right side is wearing a gray T-shirt.", "type": "attribute", "weight": 2 }, { "content": "The man in the middle is wearing a red baseball cap and a black T-shirt with a circular pattern on it.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the screen, there is a green bottle, a cattail mat, and a white wire on the ground behind the man.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves to the right. The man on the left side of the screen is talking, while the men in the middle and right side are sitting cross-legged, listening to the man on the left side of the screen talking.", "visual_elements": [ { "content": "Then, the camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The man on the left side of the screen is talking.", "type": "action", "weight": 2 }, { "content": "The men in the middle and on the right side are sitting cross-legged, listening to the man on the left side of the screen talking.", "type": "action", "weight": 2 } ] }, { "event": "Then, the camera moves to the left and focuses on the man on the far left of the screen. The man points to the man in the middle with his right hand while talking. 
The man on the right of the screen disappears from the camera.", "visual_elements": [ { "content": "Then, the camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the man on the far left of the screen.", "type": "camera", "weight": 3 }, { "content": "The man points to the man in the middle with his right hand while talking.", "type": "action", "weight": 3 }, { "content": "The man on the right side of the screen disappears from the camera.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the camera moves back, distancing itself from the man on the left side of the frame.", "visual_elements": [ { "content": "Finally, the camera moves back.", "type": "camera", "weight": 3 }, { "content": "The camera distances itself from the man on the left side of the screen.", "type": "camera", "weight": 1 } ] } ], "n_events": 4, "n_elements": 16 }, { "index": "TUNA_0714", "video_path": "VIDGEN-1M/2dqtKtQzlz4-Scene-0059.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 10.91, "resolution": { "width": 1280, "height": 720 }, "caption": "At the start of the video, the camera aims at a bustling street, with upscale shops brightly lit up on the right side. The shop signs such as \"VALENTINO\" and \"BOTTEGA VENETA\" are clearly visible. The camera zooms out, capturing a red car driving away from the shop entrance and heading towards the left side of the scene.\nThe camera first pans slightly to the right before continuing to pan left. A black car in front of the BOTTEGA VENETA shop also drives towards the left side of the scene. Palm trees on the left side of the street are wrapped in dazzling lights, and shops on the left side of the street also appear in the scene. A man and a woman walk hand in hand on the pedestrian street in front of the shops, with many green indicator lights in the background.", "events": [ { "event": "At the start of the video, the camera aims at a bustling street, with upscale shops brightly lit up on the right side. The shop signs such as \"VALENTINO\" and \"BOTTEGA VENETA\" are clearly visible. The camera zooms out, capturing a red car driving away from the shop entrance and heading towards the left side of the scene.", "visual_elements": [ { "content": "The camera focuses on a bustling street.", "type": "camera", "weight": 3 }, { "content": "On the right side of the street are upscale shops brightly lit up.", "type": "attribute", "weight": 2 }, { "content": "The shop signs such as \"VALENTINO\" and \"BOTTEGA VENETA\" are clearly visible.", "type": "attribute", "weight": 2 }, { "content": "The camera zooms out.", "type": "camera", "weight": 3 }, { "content": "A red car drives away from the shop entrance.", "type": "action", "weight": 2 }, { "content": "The car heads towards the left side of the scene.", "type": "action", "weight": 2 } ] }, { "event": "The camera first pans slightly to the right before continuing to pan left. A black car in front of the BOTTEGA VENETA shop also drives towards the left side of the scene. Palm trees on the left side of the street are wrapped in dazzling lights, and shops on the left side of the street also appear in the scene. 
A man and a woman walk hand in hand on the pedestrian street in front of the shops, with many green indicator lights in the background.", "visual_elements": [ { "content": "The camera first pans slightly to the right before continuing to pan left.", "type": "camera", "weight": 3 }, { "content": "A black car in front of the BOTTEGA VENETA shop also drives towards the left side of the scene.", "type": "action", "weight": 3 }, { "content": "Palm trees on the left side of the street are wrapped in dazzling lights.", "type": "attribute", "weight": 2 }, { "content": "Shops on the left side of the street also appear in the scene.", "type": "attribute", "weight": 1 }, { "content": "A man and a woman  walk hand in hand on the pedestrian street in front of the shops.", "type": "action", "weight": 1 }, { "content": "There are many green indicator lights in the background.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0715", "video_path": "VIDGEN-1M/3I3nZRicdNo-Scene-0015.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 10.32, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the screen shows four players playing a mixed doubles game on a tennis court. The background is a blue hard court, and the audience sits in the stands to watch the game. There are \"KIA\" and \"Jacob's Creek\" logos on the billboards around.\nIn the picture, the female player on the left side of the picture with her back to the camera is wearing a light blue tennis skirt and a white sun hat, and the player on the right side of the picture with her back to the camera is wearing a white top, blue shorts, and a white cap. The female player on the opposite side is wearing black shorts, a blue top, and a white cap, and the male player on the opposite side is wearing a white top and black shorts. Behind them stand two referees wearing yellow tops, white bottoms, and yellow caps. The two players close to the camera stand in front of the net and near the baseline, respectively, ready to receive the ball.\nThe camera zooms out, and the male player on the opposite side hits the tennis ball over. The female player on the left side close to the camera moves quickly and swings the racket to hit the ball to the opposite court. The male and female players on the opposite side fail to receive the ball. Then the camera zooms in to close up the male and female players who have successfully received the ball. The male player gives the female player a thumbs up, and the female player runs over to clap and high-five the male player.", "events": [ { "event": "At the beginning of the video, the screen shows four players playing a mixed doubles game on a tennis court. The background is a blue hard court, and the audience sits in the stands to watch the game. 
There are \"KIA\" and \"Jacob's Creek\" logos on the billboards around.", "visual_elements": [ { "content": "The screen shows four players playing a mixed doubles match on a tennis court.", "type": "attribute", "weight": 1 }, { "content": "The background is a blue hard court.", "type": "scene", "weight": 2 }, { "content": "The audience sits in the stands and watches the match.", "type": "attribute", "weight": 2 }, { "content": "The surrounding billboards have the logos of \"KIA\" and \"Jacob's Creek\".", "type": "attribute", "weight": 2 } ] }, { "event": "In the picture, the female player on the left side of the picture with her back to the camera is wearing a light blue tennis skirt and a white sun hat, and the player on the right side of the picture with her back to the camera is wearing a white top, blue shorts, and a white cap. The female player on the opposite side is wearing black shorts, a blue top, and a white cap, and the male player on the opposite side is wearing a white top and black shorts. Behind them stand two referees wearing yellow tops, white bottoms, and yellow caps. The two players close to the camera stand in front of the net and near the baseline, respectively, ready to receive the ball.", "visual_elements": [ { "content": "The female player on the left side of the screen, with her back to the camera, is wearing a light blue tennis skirt and a white sun hat.", "type": "attribute", "weight": 3 }, { "content": "The male player on the right side of the screen, with his back to the camera, is wearing a white top, blue shorts, and a white cap.", "type": "attribute", "weight": 3 }, { "content": "The female player on the opposite side is wearing black shorts, a blue top, and a white cap.", "type": "attribute", "weight": 3 }, { "content": "The male player on the opposite side is wearing a white top and black shorts.", "type": "attribute", "weight": 3 }, { "content": "Behind the male and female players on the opposite side stand two referees wearing yellow tops, white bottoms, and yellow caps.", "type": "attribute", "weight": 3 }, { "content": "The two players close to the camera stand in front of the net and near the baseline, respectively, ready to receive the ball.", "type": "action", "weight": 3 } ] }, { "event": "The camera zooms out, and the male player on the opposite side hits the tennis ball over. The female player on the left side close to the camera moves quickly and swings the racket to hit the ball to the opposite court. The male and female players on the opposite side fail to receive the ball. Then the camera zooms in to close up the male and female players who have successfully received the ball. 
The male player gives the female player a thumbs up, and the female player runs over to clap and high-five the male player.", "visual_elements": [ { "content": "The camera zooms out.", "type": "camera", "weight": 3 }, { "content": "The male player on the opposite side hits the tennis ball.", "type": "action", "weight": 3 }, { "content": "The female player on the left side, close to the camera, moves quickly and swings her racket to hit the ball to the opposite court.", "type": "action", "weight": 3 }, { "content": "The male and female players on the opposite side fail to receive the ball.", "type": "action", "weight": 2 }, { "content": "The camera zooms in to show the male and female players who successfully receive the ball.", "type": "camera", "weight": 3 }, { "content": "The male player gives the female player a thumbs up.", "type": "action", "weight": 2 }, { "content": "The female player runs over and claps and high-fives the male player.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0716", "video_path": "VIDGEN-1M/3IgL-BZFSVs-Scene-0014.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 34.97, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a white man sitting at a table. He has a beard and white hair, wears a red zip-up sweater and a gray inner layer, and sits at a yellow table with computer equipment. There is a monitor on the table, and the screen shows a virtual outdoor scene with grass and buildings. Next to the monitor is a computer host equipped with an EVGA GeForce GTX 1080 Ti graphics card. The host is black with red stripes. The background is a technical studio with black-framed windows and a light above the window. In the upper right corner of the picture, there is a black lighting device in front of the table, and behind the table are various computer hardware and accessories. Boxes and cartons of various sizes are neatly placed on the right side of the picture.\nThe camera moves slightly to the right of the screen, focusing on the man. The man talks to the camera and gestures, occasionally pointing to the content on the monitor. The monitor shows different game scenes, including windmills, buildings, and night scenes.", "events": [ { "event": "At the beginning of the video, the camera is focused on a white man sitting at a table. He has a beard and white hair, wears a red zip-up sweater and a gray inner layer, and sits at a yellow table with computer equipment. There is a monitor on the table, and the screen shows a virtual outdoor scene with grass and buildings. Next to the monitor is a computer host equipped with an EVGA GeForce GTX 1080 Ti graphics card. The host is black with red stripes. The background is a technical studio with black-framed windows and a light above the window. In the upper right corner of the picture, there is a black lighting device in front of the table, and behind the table are various computer hardware and accessories. 
Boxes and cartons of various sizes are neatly placed on the right side of the picture.", "visual_elements": [ { "content": "The camera is focused on a white man sitting at a table.", "type": "camera", "weight": 3 }, { "content": "The man has a beard and white hair, and wears a red zip-up sweater and a gray inner layer.", "type": "attribute", "weight": 3 }, { "content": "The man sits at a yellow table with computer equipment.", "type": "action", "weight": 3 }, { "content": "There is a monitor on the table.", "type": "attribute", "weight": 2 }, { "content": "The screen shows a virtual outdoor scene with grass and buildings.", "type": "attribute", "weight": 1 }, { "content": "Next to the monitor is a computer host equipped with an EVGA GeForce GTX 1080 Ti graphics card.", "type": "attribute", "weight": 1 }, { "content": "The host is black with red stripes.", "type": "attribute", "weight": 1 }, { "content": "The background is a technical studio.", "type": "scene", "weight": 3 }, { "content": "The studio has black-framed windows with lights on above the windows.", "type": "scene", "weight": 1 }, { "content": "In the upper right corner of the screen, there is a black lighting device in front of the table.", "type": "scene", "weight": 1 }, { "content": "Behind the table are various computer hardware and accessories.", "type": "scene", "weight": 1 }, { "content": "Boxes and cartons of various sizes are neatly placed on the right side of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves slightly to the right of the screen, focusing on the man. The man talks to the camera and gestures, occasionally pointing to the content on the monitor. The monitor shows different game scenes, including windmills, buildings, and night scenes.", "visual_elements": [ { "content": "The camera moves slightly to the right side of the screen.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the man.", "type": "camera", "weight": 3 }, { "content": "The man talks to the camera and gestures, pointing to the content on the monitor from time to time.", "type": "action", "weight": 3 }, { "content": "The monitor shows different game scenes, including windmills, buildings, and night scenes.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0717", "video_path": "VIDGEN-1M/3oT2mTGxyXQ-Scene-0025.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 7.71, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a pot of spareribs that are being stewed. The reddish-brown broth in the pot is boiling and giving off white steam.\nThe camera moves upward. In the picture, a large number of spareribs can be seen piled up together, with their surfaces covered by reddish-brown seasonings. The color of the spareribs is reddish-brown.", "events": [ { "event": "At the beginning of the video, the camera is focused on a pot of spareribs that are being stewed. The reddish-brown broth in the pot is boiling and giving off white steam.", "visual_elements": [ { "content": "The camera is focused on a pot of spareribs that are being stewed.", "type": "camera", "weight": 3 }, { "content": "The reddish-brown broth in the pot is boiling", "type": "attribute", "weight": 3 }, { "content": "The pot is giving off white steam.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera moves upward. 
In the picture, a large number of spareribs can be seen piled up together, with their surfaces covered by reddish-brown seasonings. The color of the spareribs is reddish-brown.", "visual_elements": [ { "content": "The camera moves upward.", "type": "camera", "weight": 3 }, { "content": "In the picture, a large number of spareribs can be seen piled up together.", "type": "scene", "weight": 2 }, { "content": "The spareribs' surfaces are covered by reddish-brown seasonings.", "type": "attribute", "weight": 2 }, { "content": "The color of the spareribs is reddish-brown.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 7 }, { "index": "TUNA_0718", "video_path": "VIDGEN-1M/45YF1-oX5SA-Scene-0025.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 5.71, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a monkey sitting on a branch. The monkey's body is light brown, with a slightly white belly and a distinct dark brown head. The monkey is sitting on the branch and looking up, surrounded by lush green leaves and a white sky in the background.\nThe camera moves to the right and the monkey's eyes look to the upper right side of the screen.\nFinally, the camera moves slightly to the left. The monkey looks toward the camera.", "events": [ { "event": "At the beginning of the video, the camera is focused on a monkey sitting on a branch. The monkey's body is light brown, with a slightly white belly and a distinct dark brown head. The monkey is sitting on the branch and looking up, surrounded by lush green leaves and a white sky in the background.", "visual_elements": [ { "content": "The camera is pointed at a monkey sitting on a branch.", "type": "camera", "weight": 3 }, { "content": "The monkey's body is light brown, with a slightly white belly and a distinct dark brown head.", "type": "attribute", "weight": 3 }, { "content": "The monkey is sitting on a branch and looking up.", "type": "action", "weight": 3 }, { "content": "The monkey is surrounded by dense green leaves.", "type": "attribute", "weight": 3 }, { "content": "The background is a white sky.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves to the right and the monkey's eyes look to the upper right side of the screen.", "visual_elements": [ { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The monkey's eyes look to the upper right of the screen.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera moves slightly to the left. The monkey looks toward the camera.", "visual_elements": [ { "content": "Finally, the camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The monkey looks at the camera.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 9 }, { "index": "TUNA_0719", "video_path": "VIDGEN-1M/4LboBy93rIQ-Scene-0040.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 11.34, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a section of the ocean where the waves are rolling. 
A male surfer dressed in a black long-sleeved wetsuit appears in the scene, standing on a pink and white surfboard against the backdrop of the vast ocean and crashing waves.\nThe surfer glides down the wave wall, carving a white trail of spray on the water's surface. The camera follows the surfer's movements to the right. The wave curls up, forming a massive white water curtain. The surfer leans forward, crouching semi-low on the surfboard. He performs a difficult jump on the wave wall and then completes a turn in the spray with both hands gripping the board. Finally, the surfer and the surfboard land steadily back on the water, and the surfer resumes a standing position, leaning slightly forward.", "events": [ { "event": "At the beginning of the video, the camera focuses on a section of the ocean where the waves are rolling. A male surfer dressed in a black long-sleeved wetsuit appears in the scene, standing on a pink and white surfboard against the backdrop of the vast ocean and crashing waves.", "visual_elements": [ { "content": "The camera focuses on a section of the ocean where the waves are rolling.", "type": "camera", "weight": 3 }, { "content": "A male surfer dressed in a black long-sleeved wetsuit appears in the scene.", "type": "attribute", "weight": 3 }, { "content": "He is standing on a pink and white surfboard.", "type": "attribute", "weight": 2 }, { "content": "The background is the vast ocean and crashing waves.", "type": "scene", "weight": 2 } ] }, { "event": "The surfer glides down the wave wall, carving a white trail of spray on the water's surface. The camera follows the surfer's movements to the right. The wave curls up, forming a massive white water curtain. The surfer leans forward, crouching semi-low on the surfboard. He performs a difficult jump on the wave wall and then completes a turn in the spray with both hands gripping the board. Finally, the surfer and the surfboard land steadily back on the water, and the surfer resumes a standing position, leaning slightly forward.", "visual_elements": [ { "content": "The surfer glides down the wave wall.", "type": "action", "weight": 3 }, { "content": "The surfboard is carving a white trail of spray on the water's surface.", "type": "action", "weight": 2 }, { "content": "The camera follows the surfer's movements to the right.", "type": "camera", "weight": 3 }, { "content": "The wave curls up, forming a massive white water curtain.", "type": "scene", "weight": 2 }, { "content": "The surfer leans forward, crouching semi-low on the surfboard.", "type": "action", "weight": 3 }, { "content": "The surfer performs a difficult jump on the wave wall.", "type": "action", "weight": 3 }, { "content": "The surfer then completes a turn in the spray with both hands gripping the board.", "type": "action", "weight": 3 }, { "content": "Finally, the surfer and the surfboard land steadily back on the water.", "type": "action", "weight": 3 }, { "content": "The surfer resumes a standing position, leaning slightly forward.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0720", "video_path": "VIDGEN-1M/4Lzdw0FRljM-Scene-0033.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 11.04, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is shooting down at a football field from above, where multiple players are engaged in a match. 
A referee dressed in a yellow referee uniform is visible in the center of the field, surrounded by players wearing white jerseys and red shorts on one side and players wearing purple jerseys on the other. The boundaries of the field are lined with advertisement boards. In the background, the stands are filled with spectators who are watching the game.\nThe camera pans to the right following the players' movements, capturing a scene where a player dressed in a purple jersey kicks the ball into the penalty area. Inside the penalty area, a player in a purple jersey and a player in a red jersey are scrambling to intercept the football. One of them heads the ball to another part of the penalty area.\nThe camera zooms in as a player in a purple jersey intercepts the ball and kicks it towards the goal. Players in white jerseys and purple jerseys are all running towards the ball. Despite an attempted interception by a player in a white jersey, the purple jersey player successfully kicks the ball into the goal. The goalkeeper in a green jersey dives to the ground, attempting to block the ball.\nThe camera cuts to a new scene, focusing again on the moment when the purple jersey player kicks the ball towards the goal. Subsequently, another purple jersey player successfully kicks the ball into the net.", "events": [ { "event": "At the beginning of the video, the camera is shooting down at a football field from above, where multiple players are engaged in a match. A referee dressed in a yellow referee uniform is visible in the center of the field, surrounded by players wearing white jerseys and red shorts on one side and players wearing purple jerseys on the other. The boundaries of the field are lined with advertisement boards. In the background, the stands are filled with spectators who are watching the game.", "visual_elements": [ { "content": "The camera is shooting down at a football field from above.", "type": "camera", "weight": 3 }, { "content": "Multiple players are engaged in a match on the football field.", "type": "attribute", "weight": 3 }, { "content": "A referee dressed in a yellow referee uniform is visible in the center of the field.", "type": "attribute", "weight": 3 }, { "content": "The referee is surrounded by players wearing white jerseys and red shorts on one side and players wearing purple jerseys on the other.", "type": "attribute", "weight": 3 }, { "content": "The boundaries of the field are lined with advertisement boards.", "type": "scene", "weight": 2 }, { "content": "In the background, the stands are filled with spectators who are watching the game.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans to the right following the players' movements, capturing a scene where a player dressed in a purple jersey kicks the ball into the penalty area. Inside the penalty area, a player in a purple jersey and a player in a red jersey are scrambling to intercept the football. 
One of them heads the ball to another part of the penalty area.", "visual_elements": [ { "content": "The camera pans to the right following the players' movements.", "type": "camera", "weight": 3 }, { "content": "A player dressed in a purple jersey kicks the ball into the penalty area.", "type": "action", "weight": 3 }, { "content": "Inside the penalty area, a player in a purple jersey and a player in a red jersey are scrambling to intercept the football.", "type": "action", "weight": 3 }, { "content": "One of them heads the ball to another part of the penalty area.", "type": "action", "weight": 3 } ] }, { "event": "The camera zooms in as a player in a purple jersey intercepts the ball and kicks it towards the goal. Players in white jerseys and purple jerseys are all running towards the ball. Despite an attempted interception by a player in a white jersey, the purple jersey player successfully kicks the ball into the goal. The goalkeeper in a green jersey dives to the ground, attempting to block the ball.", "visual_elements": [ { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "A player in a purple jersey intercepts the ball.", "type": "action", "weight": 3 }, { "content": "A player in a purple jersey kicks it towards the goal.", "type": "action", "weight": 3 }, { "content": "Players in white jerseys and purple jerseys are all running towards the ball.", "type": "action", "weight": 3 }, { "content": "Despite an attempted interception by a player in a white jersey, the purple jersey player successfully kicks the ball into the goal.", "type": "action", "weight": 3 }, { "content": "The goalkeeper in a green jersey dives to the ground, attempting to block the ball.", "type": "action", "weight": 3 } ] }, { "event": "The camera cuts to a new scene, focusing again on the moment when the purple jersey player kicks the ball towards the goal. Subsequently, another purple jersey player successfully kicks the ball into the net.", "visual_elements": [ { "content": "The camera cuts.", "type": "camera", "weight": 3 }, { "content": "The video focuses again on the moment when the purple jersey player kicks the ball towards the goal. Subsequently, another purple jersey player successfully kicks the ball into the net.", "type": "camera", "weight": 3 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0721", "video_path": "VIDGEN-1M/4vtq5BjBV2Q-Scene-0022.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 11.48, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, there are several players on the ice hockey rink playing fiercely. In the picture, a player wearing a dark jersey and a white helmet is skating quickly and controlling the puck. There is another player on the same team skating behind and to the left of the player in the dark jersey. In the picture, a player wearing a red and white jersey falls on the ice and stands up quickly. The audience seats in the background are full of spectators. There are many brand billboards around the venue.\nThe camera mainly focuses on the player who controls the puck. The player skates quickly and then quickly hits the puck to a teammate who is close to the opponent's goal. The teammate shoots the puck into the opponent's goal. The goalkeeper wearing a red and white jersey fails to defend. During this period, the camera moves from right to left with the movement of the puck. 
At the end of the video, the two players in dark jerseys skate to the right, and the two players in red and white jerseys skate towards the camera.", "events": [ { "event": "At the beginning of the video, there are several players on the ice hockey rink playing fiercely. In the picture, a player wearing a dark jersey and a white helmet is skating quickly and controlling the puck. There is another player on the same team skating behind and to the left of the player in the dark jersey. In the picture, a player wearing a red and white jersey falls on the ice and stands up quickly. The audience seats in the background are full of spectators. There are many brand billboards around the venue.", "visual_elements": [ { "content": "There are several players in a heated game on a hockey rink.", "type": "attribute", "weight": 1 }, { "content": "A player wearing a dark-colored jersey and a white helmet is skating fast and controlling the puck.", "type": "action", "weight": 3 }, { "content": "A player on the same team skates behind and to the left of the player in the dark-colored jersey.", "type": "action", "weight": 2 }, { "content": "A player in a red and white jersey collapses on the ice and then quickly gets up.", "type": "action", "weight": 2 }, { "content": "In the background, the stands are filled with spectators.", "type": "scene", "weight": 2 }, { "content": "There are many billboards around the rink.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera mainly focuses on the player who controls the puck. The player skates quickly and then quickly hits the puck to a teammate who is close to the opponent's goal. The teammate shoots the puck into the opponent's goal. The goalkeeper wearing a red and white jersey fails to defend. During this period, the camera moves from right to left with the movement of the puck. At the end of the video, the two players in dark jerseys skate to the right, and the two players in red and white jerseys skate towards the camera.", "visual_elements": [ { "content": "The camera focuses on the player controlling the puck.", "type": "camera", "weight": 3 }, { "content": "The player skates quickly and then quickly knocks the puck out to a teammate who is close to the opposing team's goal.", "type": "action", "weight": 3 }, { "content": "The teammate shoots the puck into the opposing team's goal.", "type": "action", "weight": 3 }, { "content": "The goalie in the red and white jersey fails to defend.", "type": "attribute", "weight": 2 }, { "content": "The camera follows the puck as it moves from right to left.", "type": "camera", "weight": 3 }, { "content": "At the end of the video, the two players in dark-colored jerseys skate to the right.", "type": "action", "weight": 2 }, { "content": "The two players in red and white jerseys skate towards the camera.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0722", "video_path": "VIDGEN-1M/50V1WFmrhG8-Scene-0059.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 18.06, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a man engaged in windsurfing. He is wearing a black sleeveless shirt and black and gray striped shorts. The man stands on a red windsurfing board, holding a yellow windsurfing sail with the words \"Banzai 5.7\" and \"tws-windsurf.com\" printed on it.\nThe camera pans with the man as he glides to the left. 
His body leans backwards and forwards at times, with both hands firmly gripping the sail boom and his feet stable on the windsurfing board. The board carves white trails of spray on the water's surface. The background features blue sea water with gentle waves.\nThe camera continues to pan to the left, and the man flips from one side of the windsurfing board to the other, transitioning from facing the camera to facing away from it. A white waist guard he is wearing comes into view behind him. With both hands tightly gripping the sail boom, the man leans back forcefully, controlling the direction of the windsurfing board.", "events": [ { "event": "At the beginning of the video, the camera focuses on a man engaged in windsurfing. He is wearing a black sleeveless shirt and black and gray striped shorts. The man stands on a red windsurfing board, holding a yellow windsurfing sail with the words \"Banzai 5.7\" and \"tws-windsurf.com\" printed on it.", "visual_elements": [ { "content": "The camera focuses on a man engaged in windsurfing.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a black sleeveless shirt and black and gray striped shorts.", "type": "attribute", "weight": 3 }, { "content": "The man stands on a red windsurfing board, holding a yellow windsurfing sail.", "type": "attribute", "weight": 3 }, { "content": "The words \"Banzai 5.7\" and \"tws-windsurf.com\" printed on the sail.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans the man as he glides to the left. His body leans backwards and forwards at times, with both hands firmly gripping the sail boom and his feet stable on the windsurfing board. The board carves white trails of spray on the water's surface. The background features blue sea water with gentle waves.", "visual_elements": [ { "content": "The camera pans the man as he glides to the left.", "type": "camera", "weight": 3 }, { "content": "The man's body leans backwards and forwards at times.", "type": "action", "weight": 3 }, { "content": "The man's both hands firmly gripping the sail boom.", "type": "attribute", "weight": 3 }, { "content": "The man's feet stable on the windsurfing board.", "type": "attribute", "weight": 3 }, { "content": "The board carves white trails of spray on the water's surface.", "type": "scene", "weight": 3 }, { "content": "The background features blue sea water.", "type": "scene", "weight": 2 }, { "content": "There are gentle waves on the sea surface.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to pan to the left, and the man flips from one side of the windsurfing board to the other, transitioning from facing the camera to facing away from it. A white waist guard he is wearing comes into view behind him. 
With both hands tightly gripping the sail boom, the man leans back forcefully, controlling the direction of the windsurfing board.", "visual_elements": [ { "content": "The camera continues to pan to the left", "type": "camera", "weight": 3 }, { "content": "The man flips from one side of the windsurfing board to the other.", "type": "action", "weight": 3 }, { "content": "The man is transitioning from facing the camera to facing away from it.", "type": "attribute", "weight": 3 }, { "content": "A white waist guard the man is wearing comes into view behind him.", "type": "attribute", "weight": 2 }, { "content": "With the man's both hands tightly gripping the sail boom, the man leans back forcefully, controlling the direction of the windsurfing board.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0723", "video_path": "VIDGEN-1M/549fUF21jrg-Scene-0017.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Sports Activity", "duration": 21.08, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is fixed on the rider, showing the rider's perspective. The rider wears gloves that are white, black, and red, with both hands firmly gripping the handlebars. The motorcycle's dashboard indicates a current speed of 37 kilometers per hour. The road is a wide dirt path, with green vegetation and purple flowers on both sides. In the distance to the left of the road, piles of wood can be seen.\nAs the motorcycle progresses, the speed displayed on the dashboard continues to increase, reaching a maximum of 64 kilometers per hour before gradually slowing down. The scenery on both sides of the path remains largely unchanged, with green vegetation and purple flowers in sight. The piles of wood on the left side of the path fade out of scene, and ahead lies a fork in the path, with a forest serving as the backdrop.\nAs the motorcycle approaches the fork in the road, its speed decreases, and the handlebars start to wobble due to the bumpy and uneven terrain, scattered with rocks and dried branches. Eventually, the motorcycle rotates left, with a minimum speed of 5 kilometers per hour during the turn. Once on the straight stretch, the motorcycle begins to accelerate slowly. At this point, the path is flanked by piles of wood on the left and sparse grassland on the right.", "events": [ { "event": "At the beginning of the video, the camera is fixed on the rider, showing the rider's perspective. The rider wears gloves that are white, black, and red, with both hands firmly gripping the handlebars. The motorcycle's dashboard indicates a current speed of 37 kilometers per hour. The road is a wide dirt path, with green vegetation and purple flowers on both sides. 
In the distance to the left of the road, piles of wood can be seen.", "visual_elements": [ { "content": "The camera is fixed on the rider, showing the rider's perspective.", "type": "camera", "weight": 3 }, { "content": "The rider wears gloves that are white, black, and red.", "type": "attribute", "weight": 3 }, { "content": "Both of the rider's hands firmly grip the handlebars.", "type": "attribute", "weight": 3 }, { "content": "The motorcycle's dashboard indicates a current speed of 37 kilometers per hour.", "type": "attribute", "weight": 3 }, { "content": "The path is a wide dirt path.", "type": "attribute", "weight": 2 }, { "content": "Green vegetation and purple flowers line both sides of the path.", "type": "scene", "weight": 2 }, { "content": "In the distance to the left of the path, piles of wood can be seen.", "type": "scene", "weight": 2 } ] }, { "event": "As the motorcycle progresses, the speed displayed on the dashboard continues to increase, reaching a maximum of 64 kilometers per hour before gradually slowing down. The scenery on both sides of the path remains largely unchanged, with green vegetation and purple flowers in sight. The piles of wood on the left side of the path fade out of scene, and ahead lies a fork in the path, with a forest serving as the backdrop.", "visual_elements": [ { "content": "As the motorcycle progresses, the speed displayed on the dashboard continues to increase.", "type": "action", "weight": 3 }, { "content": "The speed reaches a maximum of 64 kilometers per hour before gradually slowing down.", "type": "action", "weight": 3 }, { "content": "The piles of wood on the left side of the path eventually fade out of scene.", "type": "scene", "weight": 1 }, { "content": "Ahead lies a fork in the path.", "type": "attribute", "weight": 3 }, { "content": "A forest serves as the backdrop.", "type": "scene", "weight": 1 } ] }, { "event": "As the motorcycle approaches the fork in the road, its speed decreases, and the handlebars start to wobble due to the bumpy and uneven terrain, scattered with rocks and dried branches. Eventually, the motorcycle rotates left, with a minimum speed of 5 kilometers per hour during the turn. Once on the straight stretch, the motorcycle begins to accelerate slowly. 
At this point, the path is flanked by piles of wood on the left and sparse grassland on the right.", "visual_elements": [ { "content": "As the motorcycle approaches the fork in the path, its speed decreases.", "type": "action", "weight": 3 }, { "content": "The handlebars start to wobble.", "type": "action", "weight": 3 }, { "content": "The terrain is bumpy and uneven, scattered with rocks and dried branches.", "type": "attribute", "weight": 2 }, { "content": "Eventually, the motorcycle rotates left.", "type": "action", "weight": 3 }, { "content": "The minimum speed of the motorcycle during the turn at the fork in the road is stated to be 5 kilometers per hour.", "type": "attribute", "weight": 1 }, { "content": "Once on the straight stretch, the motorcycle begins to accelerate slowly.", "type": "action", "weight": 2 }, { "content": "The path is flanked by piles of wood on the left.", "type": "scene", "weight": 1 }, { "content": "The path is flanked by sparse grassland on the right.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0724", "video_path": "VIDGEN-1M/5QcL53ULgzY-Scene-0031.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 8.38, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a steaming bowl of ramen. Inside the bowl, there are rich broth, noodles, slices of meat, scallions and some red ingredients. In the center of the picture are a pair of white chopsticks and a black spoon, which are held by a person wearing dark clothes. The whole picture is blurry and some tables and chairs can be seen.\nThe camera gradually zooms in. This person is using the chopsticks and the spoon to mix the noodles so that they are fully coated with the broth. Then this person picks up the noodles with the chopsticks and the camera moves upward.", "events": [ { "event": "At the beginning of the video, the camera is focused on a steaming bowl of ramen. Inside the bowl, there are rich broth, noodles, slices of meat, scallions and some red ingredients. In the center of the picture are a pair of white chopsticks and a black spoon, which are held by a person wearing dark clothes. The whole picture is blurry and some tables and chairs can be seen.", "visual_elements": [ { "content": "The camera is focused on a steaming bowl of ramen.", "type": "camera", "weight": 3 }, { "content": "Inside the bowl, there are rich broth, noodles, slices of meat, scallions and some red ingredients.", "type": "attribute", "weight": 3 }, { "content": "In the center of the picture are a pair of white chopsticks and a black spoon.", "type": "attribute", "weight": 3 }, { "content": "The chopsticks and the spoon are held by a person wearing dark clothes.", "type": "attribute", "weight": 2 }, { "content": "The whole picture is blurry.", "type": "scene", "weight": 1 }, { "content": "Some tables and chairs can be seen.", "type": "scene", "weight": 1 } ] }, { "event": "The camera gradually zooms in. This person is using the chopsticks and the spoon to mix the noodles so that they are fully coated with the broth. 
Then this person picks up the noodles with the chopsticks and the camera moves upward.", "visual_elements": [ { "content": "The camera gradually zooms in.", "type": "camera", "weight": 3 }, { "content": "This person is using the chopsticks and the spoon to mix the noodles so that they are fully coated with the broth.", "type": "action", "weight": 3 }, { "content": "This person picks up the noodles with the chopsticks.", "type": "action", "weight": 3 }, { "content": "The camera moves upward.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0725", "video_path": "VIDGEN-1M/5UcKsl-0Yic-Scene-0017.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 6.87, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on an older white woman in front of a glass door. The woman is wearing a gray top with a pink circle pattern. The woman wears black-framed glasses and gold earrings, and her hair is white, short and fluffy. There is a black microphone in front of the woman, and her right arm is on her waist while being interviewed. In the background is a glass door with a black frame, and a white \"PULL\" sign is posted on the door frame. The glass door reflects several cars parked in front of the door and some green trees.\nThe camera pans slightly to the right, keeping a close-up shot. The woman is talking to the camera sideways. On the left side of the picture, the glass door reflects the red-roofed house, the vehicles on the road, and some pedestrians.", "events": [ { "event": "At the beginning of the video, the camera is focused on an older white woman in front of a glass door. The woman is wearing a gray top with a pink circle pattern. The woman wears black-framed glasses and gold earrings, and her hair is white, short and fluffy. There is a black microphone in front of the woman, and her right arm is on her waist while being interviewed. In the background is a glass door with a black frame, and a white \"PULL\" sign is posted on the door frame. The glass door reflects several cars parked in front of the door and some green trees.", "visual_elements": [ { "content": "The camera is focused on an older white woman in front of a glass door.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a gray top with a pink circle pattern.", "type": "attribute", "weight": 2 }, { "content": "The woman is wearing black-framed glasses and gold earrings.", "type": "attribute", "weight": 1 }, { "content": "The woman's hair is white, short and fluffy.", "type": "attribute", "weight": 1 }, { "content": "There is a black microphone in front of the woman.", "type": "attribute", "weight": 1 }, { "content": "The woman's right arm is on her waist and she is being interviewed.", "type": "action", "weight": 3 }, { "content": "The background is a glass door with a black frame.", "type": "scene", "weight": 3 }, { "content": "There is a white \"PULL\" sign on the door frame.", "type": "scene", "weight": 1 }, { "content": "The glass door reflects several cars parked in front of the door and some green trees.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans slightly to the right, keeping a close-up shot. The woman is talking to the camera sideways. 
On the left side of the picture, the glass door reflects the red-roofed house, the vehicles on the road, and some pedestrians.", "visual_elements": [ { "content": "The camera pans slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The camera keeps a close-up shot.", "type": "camera", "weight": 3 }, { "content": "The woman is talking to the camera sideways.", "type": "action", "weight": 2 }, { "content": "On the left side of the picture, the glass door reflects the red-roofed house, the vehicles driving on the road and some pedestrians.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0726", "video_path": "VIDGEN-1M/5jcG3Kdny6E-Scene-0007.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 11.15, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera looks down at the ice hockey rink. There is a team of players wearing yellow jerseys and blue helmets and another team of players wearing light jerseys and white helmets. In the center of the picture, a player in a yellow jersey and a player in a light jersey fall to the ground. The referee, wearing a black and white striped shirt, black trousers and a black cap, stood at the edge of the field. There were substitutes and referees in the background, and billboards of various brands were around the field.\nNext, the camera continued to follow the game from left to right, and the player in the yellow jersey controlled the puck and quickly skated forward. Another player in a light-colored jersey stretched out his stick to block, and the player in the yellow jersey fell to the ground and hit the ball into the opponent's goal while lying on his stomach. After scoring the goal, the player in the yellow jersey who hit the ball slid to his knees on the ground and raised his arms to cheer, and then the other two players also skated over to celebrate together.", "events": [ { "event": "At the beginning of the video, the camera looks down at the ice hockey rink. There is a team of players wearing yellow jerseys and blue helmets and another team of players wearing light jerseys and white helmets. In the center of the picture, a player in a yellow jersey and a player in a light jersey fall to the ground. The referee, wearing a black and white striped shirt, black trousers and a black cap, stood at the edge of the field. 
There were substitutes and referees in the background, and billboards of various brands were around the field.", "visual_elements": [ { "content": "The camera looks down on the hockey rink.", "type": "camera", "weight": 3 }, { "content": "There is a team of players wearing yellow jerseys and blue helmets and another team wearing light-colored jerseys and white helmets.", "type": "attribute", "weight": 3 }, { "content": "A player in a yellow jersey and a player in a light-colored jersey fall to the ground in the center of the frame.", "type": "action", "weight": 3 }, { "content": "The referee wearing a black and white striped shirt and black pants and a black cap is standing at the edge of the field.", "type": "attribute", "weight": 2 }, { "content": "There are substitutes and referee in the background.", "type": "scene", "weight": 2 }, { "content": "The field is surrounded by billboards of various brands.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera continued to follow the game from left to right, and the player in the yellow jersey controlled the puck and quickly skated forward. Another player in a light-colored jersey stretched out his stick to block, and the player in the yellow jersey fell to the ground and hit the ball into the opponent's goal while lying on his stomach. After scoring the goal, the player in the yellow jersey who hit the ball slid to his knees on the ground and raised his arms to cheer, and then the other two players also skated over to celebrate together.", "visual_elements": [ { "content": "The camera continues to follow the game from left to right.", "type": "camera", "weight": 3 }, { "content": "The player in the yellow jersey controls the puck and quickly skates forward.", "type": "action", "weight": 3 }, { "content": "Another player in a light-colored jersey extends his stick to block the puck.", "type": "action", "weight": 3 }, { "content": "The player in the yellow jersey falls to the ground and hits the puck into the opposing team's goal while lying on his back.", "type": "action", "weight": 3 }, { "content": "After the goal, the player in yellow slides to his knees and raises his arms in the air.", "type": "action", "weight": 3 }, { "content": "The other two players then slide over to celebrate.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0727", "video_path": "VIDGEN-1M/5xUnmyg1Meg-Scene-0086.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 20.03, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a picture of a planter on a balcony with a plant growing in the planter, the plant is green the planter is red, the wall behind the planter is brick red and the back of the balcony is a parking lot.\nThe camera pulls away to the left showing the whole plant and the planter, behind the planter is a black railing, the camera moves to the lower right corner, the camera focuses on the soil in the planter, there are some weeds on the surface of the soil, a hand wearing a blue glove appears in the frame and starts to pull the weeds out of the planter. 
Finally, the camera moves upward and the scene ends with the hand holding the weeds hanging in mid-air.", "events": [ { "event": "The video begins with a picture of a planter on a balcony with a plant growing in the planter, the plant is green the planter is red, the wall behind the planter is brick red and the back of the balcony is a parking lot.", "visual_elements": [ { "content": "The image is of a planter on a balcony.", "type": "scene", "weight": 3 }, { "content": "A plant is growing in the planter.", "type": "attribute", "weight": 3 }, { "content": "The plant is green and the planter is red.", "type": "attribute", "weight": 2 }, { "content": "The wall behind the planter is brick red.", "type": "attribute", "weight": 2 }, { "content": "The back of the balcony is a parking lot.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera pulls away to the left showing the whole plant and the planter, behind the planter is a black railing, the camera moves to the lower right corner, the camera focuses on the soil in the planter, there are some weeds on the surface of the soil, a hand wearing a blue glove appears in the frame and starts to pull the weeds out of the planter. Finally, the camera moves upward and the scene ends with the hand holding the weeds hanging in mid-air.", "visual_elements": [ { "content": "The camera pulls away to the left.", "type": "camera", "weight": 3 }, { "content": "The camera shows the whole plant and the planter.", "type": "scene", "weight": 2 }, { "content": "Behind the planter is a black railing.", "type": "scene", "weight": 1 }, { "content": "The camera moves to the lower right corner.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the soil in the planter.", "type": "camera", "weight": 3 }, { "content": "There are some weeds on the surface of the soil.", "type": "scene", "weight": 2 }, { "content": "A hand wearing a blue glove appears in the frame.", "type": "action", "weight": 3 }, { "content": "The hand pulls the weeds out of the planter.", "type": "action", "weight": 3 }, { "content": "The camera moves upward.", "type": "camera", "weight": 3 }, { "content": "The frame ends with the hand holding the weeds hanging in mid-air.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0728", "video_path": "VIDGEN-1M/6TNVFNAGgg0-Scene-0046.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 9.68, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is aimed at an indoor volleyball court. There is a volleyball net in the middle of the court. There is a white table with a scoreboard on the innermost side of the volleyball net. The time on the scoreboard is 0:00, and the score between HOME and GUEST is 5:3. Behind the table stands a long-haired woman in a dark dress. In front of her are players wearing red jerseys. On her right are players from another team wearing dark blue jerseys. Both teams are young girls wearing black shorts and black knee pads. Some players wearing dark blue jerseys sit on the benches on the side.\nThe players in red jerseys adjust their positions to receive the ball. The player in red jersey No. 5, who is farthest from the camera, rushes over and kneels on one knee, hitting the ball with both elbows facing up. The camera moves to the right, and the volleyball jumps over the player in red jersey No. 10 to the player in red jersey No. 11. 
Player No. 11 stretches out his hands to receive the ball. When the player in red jersey receives the ball, a player in dark blue jersey on the opposite side observes through the volleyball net.", "events": [ { "event": "At the beginning of the video, the camera is aimed at an indoor volleyball court. There is a volleyball net in the middle of the court. There is a white table with a scoreboard on the innermost side of the volleyball net. The time on the scoreboard is 0:00, and the score between HOME and GUEST is 5:3. Behind the table stands a long-haired woman in a dark dress. In front of her are players wearing red jerseys. On her right are players from another team wearing dark blue jerseys. Both teams are young girls wearing black shorts and black knee pads. Some players wearing dark blue jerseys sit on the benches on the side.", "visual_elements": [ { "content": "The camera is aimed at an indoor volleyball court.", "type": "camera", "weight": 3 }, { "content": "There is a volleyball net in the middle of the court.", "type": "attribute", "weight": 2 }, { "content": "There is a white table with a scoreboard on the innermost side of the volleyball net.", "type": "scene", "weight": 2 }, { "content": "The time on the scoreboard is 0:00.", "type": "scene", "weight": 2 }, { "content": "The score between HOME and GUEST is 5:3 below the time.", "type": "scene", "weight": 2 }, { "content": "There is also a long-haired woman in a dark dress standing behind the table.", "type": "attribute", "weight": 2 }, { "content": "In front of the long-haired woman is a player wearing a red jersey.", "type": "attribute", "weight": 3 }, { "content": "To the right of the long-haired woman is a player from another team wearing a dark blue jersey.", "type": "attribute", "weight": 3 }, { "content": "Both teams are young girls, all wearing black shorts and black knee pads.", "type": "attribute", "weight": 2 }, { "content": "Some players wearing dark blue jerseys are sitting on the benches nearby.", "type": "attribute", "weight": 2 } ] }, { "event": "The players in red jerseys adjust their positions to receive the ball. The player in red jersey No. 5, who is farthest from the camera, rushes over and kneels on one knee, hitting the ball with both elbows facing up. The camera moves to the right, and the volleyball jumps over the player in red jersey No. 10 to the player in red jersey No. 11. Player No. 11 stretches out his hands to receive the ball. When the player in red jersey receives the ball, a player in dark blue jersey on the opposite side observes through the volleyball net.", "visual_elements": [ { "content": "The players in the red jerseys adjust their positions to receive the ball.", "type": "action", "weight": 2 }, { "content": "The player in the red jersey No. 5, who is farthest from the camera, rushes over, kneels on one knee, and hits the ball with both elbows facing up.", "type": "action", "weight": 3 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The volleyball jumps over the player in the red jersey No. 10 to the player in the red jersey No. 11.", "type": "action", "weight": 2 }, { "content": "The player in the red jersey No. 
11 stretches out his hands to receive the ball.", "type": "action", "weight": 2 }, { "content": "When the player in the red jersey receives the ball, a player in the dark blue jersey opposite observes through the volleyball net.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0729", "video_path": "VIDGEN-1M/7AYf-ChIa5s-Scene-0008.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 8.48, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a man dressed in a red jersey and white shorts. He wears a red baseball cap and black sneakers, and lies flat on the grass. In front of the man is the pitcher's mound of a baseball field, surrounded by green grass. The man is lying with his head close to the edge of the pitcher's mound.\nThe man begins to slowly move his body, lifting his head slightly and using both arms to propel himself forward, gradually approaching the pitcher's mound. In his right hand, he holds a white baseball. The camera follows the man's movements, slowly panning to the left.\nThe camera slightly shakes left and right. The man glances back, transferring the ball from his right hand to his left. He then straightens his upper body and forcefully throws the baseball in his left hand in a low trajectory. Immediately after, he quickly leans down and lies prone on the grass again, with his head resting close to the pitcher's mound.", "events": [ { "event": "At the beginning of the video, the camera focuses on a man dressed in a red jersey and white shorts. He wears a red baseball cap and black sneakers, and lies flat on the grass. In front of the man is the pitcher's mound of a baseball field, surrounded by green grass. The man is lying with his head close to the edge of the pitcher's mound.", "visual_elements": [ { "content": "The camera focuses on a man dressed in a red jersey and white shorts.", "type": "camera", "weight": 3 }, { "content": "The man wears a red baseball cap and black sneakers.", "type": "attribute", "weight": 3 }, { "content": "The man lies flat on the grass.", "type": "attribute", "weight": 3 }, { "content": "In front of the man is the pitcher's mound of a baseball field.", "type": "attribute", "weight": 3 }, { "content": "The baseball field is surrounded by green grass.", "type": "scene", "weight": 2 }, { "content": "The man lies with his head close to the edge of the pitcher's mound.", "type": "attribute", "weight": 2 } ] }, { "event": "The man begins to slowly move his body, lifting his head slightly and using both arms to propel himself forward, gradually approaching the pitcher's mound. In his right hand, he holds a white baseball. The camera follows the man's movements, slowly panning to the left.", "visual_elements": [ { "content": "The man lifts his head slightly.", "type": "action", "weight": 2 }, { "content": "The man uses both arms to propel himself forward.", "type": "action", "weight": 3 }, { "content": "The man gradually approaches the pitcher's mound.", "type": "action", "weight": 2 }, { "content": "In the man's right hand, he holds a white baseball.", "type": "attribute", "weight": 2 }, { "content": "The camera follows the man's movements, slowly panning to the left.", "type": "camera", "weight": 3 } ] }, { "event": "The camera slightly shakes left and right. The man glances back, transferring the ball from his right hand to his left. 
He then straightens his upper body and forcefully throws the baseball in his left hand in a low trajectory. Immediately after, he quickly leans down and lies prone on the grass again, with his head resting close to the pitcher's mound.", "visual_elements": [ { "content": "The camera slightly shakes left and right.", "type": "camera", "weight": 3 }, { "content": "The man glances back.", "type": "action", "weight": 3 }, { "content": "The man transfers the ball from his right hand to his left.", "type": "action", "weight": 3 }, { "content": "The man then straightens his upper body.", "type": "action", "weight": 3 }, { "content": "The man forcefully throws the baseball in his left hand in a low trajectory.", "type": "action", "weight": 3 }, { "content": "The man quickly leans down and lies prone on the grass again, with his head resting close to the pitcher's mound.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0730", "video_path": "VIDGEN-1M/7lUl-T1z7ls-Scene-0027.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 6.46, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a pool surrounded by white stone bricks, in which several turtles are swimming. The water surface is dark green with slight ripples. Under the sunlight, the water surface is covered with flickering spots. There are white railings around the pool, and several people in summer clothes are standing outside the railings watching the turtles. On the left is an adult wearing a blue short-sleeved shirt, with an orange bracelet on his right wrist, and his arm is stretched into the railing. There is also a child outside the railing on the upper left of the screen. There is a baby carriage on the right side of the child. In the background is a lime path. On the other side of the lime path is an area surrounded by barbed wire, with low rockery, green grass and trees.\nThen, the camera moves to the lower right, gradually focusing on the turtle, looking down at the turtle from above the water. The turtle and the railing in the upper right water gradually disappear from the picture. The turtles sometimes surface and sometimes dive into the water. The sun makes the water surface brighter.", "events": [ { "event": "At the beginning of the video, the camera is focused on a pool surrounded by white stone bricks, in which several turtles are swimming. The water surface is dark green with slight ripples. Under the sunlight, the water surface is covered with flickering spots. There are white railings around the pool, and several people in summer clothes are standing outside the railings watching the turtles. On the left is an adult wearing a blue short-sleeved shirt, with an orange bracelet on his right wrist, and his arm is stretched into the railing. There is also a child outside the railing on the upper left of the screen. There is a baby carriage on the right side of the child. In the background is a lime path. 
On the other side of the lime path is an area surrounded by barbed wire, with low rockery, green grass and trees.", "visual_elements": [ { "content": "The camera is pointed at a pool surrounded by white stone bricks.", "type": "camera", "weight": 3 }, { "content": "There are many turtles swimming in the pool.", "type": "action", "weight": 3 }, { "content": "The water surface is dark green with slight ripples.", "type": "scene", "weight": 2 }, { "content": "Under the sunlight, the water surface is shining with flickering spots.", "type": "scene", "weight": 1 }, { "content": "There are white railings around the pool.", "type": "attribute", "weight": 2 }, { "content": "Several people in summer clothes are standing outside the railings watching the turtles.", "type": "attribute", "weight": 1 }, { "content": "On the left side of the screen is an adult wearing a blue short-sleeved shirt, with an orange bracelet on his right wrist, and his arm stretched into the railing.", "type": "attribute", "weight": 1 }, { "content": "On the upper left of the screen is a child wearing a striped jumpsuit.", "type": "attribute", "weight": 1 }, { "content": "There is a baby carriage on the right of the child.", "type": "attribute", "weight": 1 }, { "content": "The background is a lime path.", "type": "scene", "weight": 2 }, { "content": "On the other side of the lime path is an area surrounded by barbed wire, with low rockery, green grass and trees.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves to the lower right, gradually focusing on the turtle, looking down at the turtle from above the water. The turtle and the railing in the upper right water gradually disappear from the picture. The turtles sometimes surface and sometimes dive into the water. The sun makes the water surface brighter.", "visual_elements": [ { "content": "The camera moves to the lower right.", "type": "camera", "weight": 3 }, { "content": "The camera gradually focuses on the turtle, looking down at the turtle from above the water.", "type": "camera", "weight": 3 }, { "content": "The turtle and the railing in the upper right water gradually disappear from the screen.", "type": "attribute", "weight": 2 }, { "content": "The turtles sometimes surface and sometimes dive into the water.", "type": "action", "weight": 2 }, { "content": "The sun makes the water surface brighter.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0731", "video_path": "VIDGEN-1M/7xbCgVIbFVI-Scene-0365.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 5.71, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a pair of hands holding a white paper. The paper has obvious creases and contains several lines of text about social media accounts. The hands have a darker complexion and the nails on the right hand are painted black. In the background is a brightly lit parking lot with a smooth, reflective surface. There are multiple cars parked on both sides of the screen in various colors, including silver, black, and blue.\nThe camera moves slightly upwards as the hand moves. The hand is about to put the paper down. At the end of the video, the camera goes blurry.", "events": [ { "event": "The video begins with the camera focusing on a pair of hands holding a white paper. The paper has obvious creases and contains several lines of text about social media accounts. 
The hands have a darker complexion and the nails on the right hand are painted black. In the background is a brightly lit parking lot with a smooth, reflective surface. There are multiple cars parked on both sides of the screen in various colors, including silver, black, and blue.", "visual_elements": [ { "content": "The camera is focused on a pair of hands holding a white paper.", "type": "camera", "weight": 3 }, { "content": "The paper has a noticeable crease.", "type": "attribute", "weight": 2 }, { "content": "The paper has several lines of text written on it, which refer to social media accounts.", "type": "attribute", "weight": 1 }, { "content": "The hands have a darker skin tone, and the nails on the right hand are painted black.", "type": "attribute", "weight": 2 }, { "content": "In the background is a brightly lit parking lot.", "type": "scene", "weight": 3 }, { "content": "The parking lot has a smooth and reflective surface.", "type": "scene", "weight": 2 }, { "content": "There are several cars parked on both sides of the frame, in various colors, including silver, black, and blue.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves slightly upwards as the hand moves. The hand is about to put the paper down. At the end of the video, the camera goes blurry.", "visual_elements": [ { "content": "The camera moves slightly upwards as the hands move.", "type": "camera", "weight": 3 }, { "content": "The hands are about to put the paper down.", "type": "action", "weight": 2 }, { "content": "At the end of the video, the camera becomes blurry.", "type": "camera", "weight": 1 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0732", "video_path": "VIDGEN-1M/87BLjo0RN_U-Scene-0142.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 19.73, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, two men are engaged in a basketball game. The man on the left wears a black sleeveless jersey and black shorts, holding a basketball. The man on the right is dressed in a yellow short-sleeved T-shirt and white shorts. In the background, a referee in a black and white striped referee uniform stands courtside, with a whistle in his mouth.\nThe camera pans to the right, capturing the scene where the man in the yellow T-shirt is facing the man in the black jersey. The front of the yellow jersey has the words \"T JASS 22\" printed on it. The man in the black jersey is dribbling the ball while running towards the opposing team's hoop. His back is turned to the camera, revealing orange letters and the number 12 on the back of his jersey. The man in the yellow jersey has been defending on the left side of the man in the black jersey. More players in black and yellow jerseys appear in the scene.\nThe camera pans the movements of the man in the black jersey, first moving to the right and then to the left. The man in the black jersey initially approaches the opposing team's hoop but gradually shifts towards the right side of the shooting area. More players in yellow jerseys surround the man in the black jersey who is holding the ball, and one of the yellow jersey players is blocked by a teammate of the man in the black jersey.\nThe camera continues to pan to the right. The man in the black jersey continues to dribble the ball, and a player dressed in a yellow jersey and black shorts stands in his way. 
Suddenly, the man in the black jersey forcefully throws the basketball in his hand towards the left side of the shooting area. On the right side of the court, there is a rudimentary bleacher stand, and several players in black jerseys, who are potential substitutes, stand in front of the benches watching the game.", "events": [ { "event": "At the beginning of the video, two men are engaged in a basketball game. The man on the left wears a black sleeveless jersey and black shorts, holding a basketball. The man on the right is dressed in a yellow short-sleeved T-shirt and white shorts. In the background, a referee in a black and white striped referee uniform stands courtside, with a whistle in his mouth.", "visual_elements": [ { "content": "There are two men engaged in a basketball game in the scene.", "type": "attribute", "weight": 3 }, { "content": "The man on the left wears a black sleeveless jersey and black shorts.", "type": "attribute", "weight": 3 }, { "content": "The man on the left holds a basketball.", "type": "attribute", "weight": 3 }, { "content": "The man on the right is dressed in a yellow short-sleeved T-shirt and white shorts.", "type": "attribute", "weight": 3 }, { "content": "In the background, a referee in a black and white striped referee uniform stands courtside.", "type": "scene", "weight": 2 }, { "content": "There is a whistle in the referee's mouth.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera pans to the right, capturing the scene where the man in the yellow T-shirt is facing the man in the black jersey. The front of the yellow jersey has the words \"T JASS 22\" printed on it. The man in the black jersey is dribbling the ball while running towards the opposing team's hoop. His back is turned to the camera, revealing orange letters and the number 12 on the back of his jersey. The man in the yellow jersey has been defending on the left side of the man in the black jersey. More players in black and yellow jerseys appear in the scene.", "visual_elements": [ { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "The camera is capturing the scene where the man in the yellow T-shirt is facing the man in the black jersey.", "type": "attribute", "weight": 2 }, { "content": "The front of the yellow jersey has the words \"T JASS 22\" printed on it.", "type": "attribute", "weight": 2 }, { "content": "The man in the black jersey is dribbling the ball while running towards the opposing team's hoop.", "type": "action", "weight": 3 }, { "content": "The man in the black jersey's back is turned to the camera.", "type": "attribute", "weight": 2 }, { "content": "The back of the black jersey has orange letters and the number 12 displayed on it.", "type": "attribute", "weight": 2 }, { "content": "The man in the yellow jersey has been defending on the left side of the man in the black jersey.", "type": "action", "weight": 3 }, { "content": "More players in black and yellow jerseys appear in the scene.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans the movements of the man in the black jersey, first moving to the right and then to the left. The man in the black jersey initially approaches the opposing team's hoop but gradually shifts towards the right side of the shooting area. 
More players in yellow jerseys surround the man in the black jersey who is holding the ball, and one of the yellow jersey players is blocked by a teammate of the man in the black jersey.", "visual_elements": [ { "content": "The camera pans the movements of the man in the black jersey, first moving to the right and then to the left.", "type": "camera", "weight": 3 }, { "content": "The man in the black jersey initially approaches the opposing team's hoop but gradually shifts towards the right side of the shooting area.", "type": "action", "weight": 3 }, { "content": "More players in yellow jerseys surround the man in the black jersey who is holding the ball.", "type": "action", "weight": 3 }, { "content": "One of the yellow jersey players is blocked by a teammate of the man in the black jersey.", "type": "action", "weight": 2 } ] }, { "event": "The camera continues to pan to the right. The man in the black jersey continues to dribble the ball, and a player dressed in a yellow jersey and black shorts stands in his way. Suddenly, the man in the black jersey forcefully throws the basketball in his hand towards the left side of the shooting area. On the right side of the court, there is a rudimentary bleacher stand, and several players in black jerseys, who are potential substitutes, stand in front of the benches watching the game.", "visual_elements": [ { "content": "The camera continues to pan to the right.", "type": "camera", "weight": 3 }, { "content": "The man in the black jersey continues to dribble the ball.", "type": "action", "weight": 3 }, { "content": "A player dressed in a yellow jersey and black shorts stands in his way.", "type": "action", "weight": 3 }, { "content": "The man in the black jersey forcefully throws the basketball in his hand towards the left side of the shooting area.", "type": "action", "weight": 3 }, { "content": "On the right side of the court, there is a rudimentary bleacher stand.", "type": "scene", "weight": 2 }, { "content": "Several players in black jerseys, who are potential substitutes, stand in front of the benches watching the game.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 24 }, { "index": "TUNA_0733", "video_path": "VIDGEN-1M/8FIcHjtdjYs-Scene-0021.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 10.44, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a male player dressed in a white baseball uniform. He wears a black baseball cap and holds a black baseball glove in his right hand. He is walking from the left side of the scene to the right, with a stand filled with spectators serving as the backdrop.\nThe camera cuts to a panoramic view of the baseball field, with subtitles in the top left corner displaying the score and inning of the game. On the field, a pitcher dressed in a white jersey and wearing a black baseball cap stands on the pitcher's mound. He wears a black baseball glove on his left hand and holds a baseball behind the glove with his right hand. In front of the pitcher is the home plate, where a batter dressed in a gray jersey stands. Behind the batter are the catcher, who is wearing protective gear, and a referee.\nThe pitcher first lifts his left leg and then places it back down. He then turns his body to the side and forcefully throws the ball in his right hand forward. 
The batter swings the bat in an attempt to hit the ball but misses, and the ball flies towards the catcher, who catches it. In the background, some spectators sitting in the stands are applauding and cheering, while others are engaged in conversation.\nThe camera zooms in. After completing his pitch, the pitcher walks forward with his back turned to the camera. The catcher in front of the pitcher walks to the right side of the scene and exits the scene. The batter strides forward with a sweeping motion of his arm towards the left front of him.", "events": [ { "event": "At the beginning of the video, the camera focuses on a male player dressed in a white baseball uniform. He wears a black baseball cap and holds a black baseball glove in his right hand. He is walking from the left side of the scene to the right, with a stand filled with spectators serving as the backdrop.", "visual_elements": [ { "content": "The camera focuses on a male player dressed in a white baseball uniform.", "type": "camera", "weight": 3 }, { "content": "The player wears a black baseball cap.", "type": "attribute", "weight": 3 }, { "content": "The player holds a black baseball glove in his right hand.", "type": "attribute", "weight": 2 }, { "content": "The player is walking from the left side of the scene to the right.", "type": "action", "weight": 3 }, { "content": "The background is a stand filled with spectators.", "type": "scene", "weight": 2 } ] }, { "event": "The camera cuts to a panoramic view of the baseball field, with subtitles in the top left corner displaying the score and inning of the game. On the field, a pitcher dressed in a white jersey and wearing a black baseball cap stands on the pitcher's mound. He wears a black baseball glove on his left hand and holds a baseball behind the glove with his right hand. In front of the pitcher is the home plate, where a batter dressed in a gray jersey stands. Behind the batter are the catcher, who is wearing protective gear, and a referee.", "visual_elements": [ { "content": "The camera cuts to a panoramic view of the baseball field.", "type": "camera", "weight": 3 }, { "content": "The subtitles in the top left corner displaying the score and inning of the game.", "type": "scene", "weight": 1 }, { "content": "On the field, a pitcher dressed in a white jersey and wearing a black baseball cap stands on the pitcher's mound.", "type": "attribute", "weight": 3 }, { "content": "The pitcher wears a black baseball glove on his left hand.", "type": "attribute", "weight": 2 }, { "content": "The pitcher holds a baseball behind the glove with his right hand.", "type": "attribute", "weight": 2 }, { "content": "In front of the pitcher is the home plate.", "type": "attribute", "weight": 2 }, { "content": "A batter dressed in a gray jersey stands on the home plate.", "type": "attribute", "weight": 2 }, { "content": "Behind the batter are the catcher, who is wearing protective gear, and a referee.", "type": "attribute", "weight": 2 } ] }, { "event": "The pitcher first lifts his left leg and then places it back down. He then turns his body to the side and forcefully throws the ball in his right hand forward. The batter swings the bat in an attempt to hit the ball but misses, and the ball flies towards the catcher, who catches it. 
In the background, some spectators sitting in the stands are applauding and cheering, while others are engaged in conversation.", "visual_elements": [ { "content": "The pitcher first lifts his left leg and then places it back down.", "type": "action", "weight": 3 }, { "content": "The pitcher then turns his body to the side and forcefully throws the ball in his right hand forward.", "type": "action", "weight": 3 }, { "content": "The batter swings the bat in an attempt to hit the ball but misses.", "type": "action", "weight": 3 }, { "content": "The ball flies towards the catcher, who catches.", "type": "action", "weight": 3 }, { "content": "In the background, some spectators sitting in the stands are applauding and cheering, while others are engaged in conversation.", "type": "scene", "weight": 2 } ] }, { "event": "The camera zooms in. After completing his pitch, the pitcher walks forward with his back turned to the camera. The catcher in front of the pitcher walks to the right side of the scene and exits the scene. The batter strides forward with a sweeping motion of his arm towards the left front of him.", "visual_elements": [ { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "After completing his pitch, the pitcher walks forward with his back turned to the camera.", "type": "action", "weight": 3 }, { "content": "The catcher in front of the pitcher walks to the right side of the scene and exits the scene.", "type": "action", "weight": 3 }, { "content": "The batter strides forward with a sweeping motion of his arm towards the left front of him.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 22 }, { "index": "TUNA_0734", "video_path": "VIDGEN-1M/8WyqXrXassg-Scene-0099.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 5.44, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is aimed at a pitcher wearing a white baseball uniform and number 63. He is standing on the pitcher's mound wearing a blue baseball cap, with his left leg raised to prepare for pitching. In front of the pitcher is a referee wearing a black short-sleeved shirt, gray trousers, a black cap and his hands on his waist. There are some spectators in the blue audience seats in the background, and substitutes wearing white or dark clothes are sitting under the audience seats.\nThe pitcher starts to pitch, his left leg is arched, his right toe is raised, his body is leaning forward, his left hand wearing a baseball glove is backward, and his right hand holds the baseball and throws it hard. The camera moves to the left with the baseball thrown. On the left side of the picture are two batters wearing black baseball uniforms, black hats and white gloves. They stand in front of the home plate and prepare to hit the ball but miss. Behind the batter is a defender wearing a white baseball uniform and a blue helmet. The defender's left hand wearing a glove catches the flying baseball.", "events": [ { "event": "At the beginning of the video, the camera is aimed at a pitcher wearing a white baseball uniform and number 63. He is standing on the pitcher's mound wearing a blue baseball cap, with his left leg raised to prepare for pitching. In front of the pitcher is a referee wearing a black short-sleeved shirt, gray trousers, a black cap and his hands on his waist. 
There are some spectators in the blue audience seats in the background, and substitutes wearing white or dark clothes are sitting under the audience seats.", "visual_elements": [ { "content": "The camera is aimed at a pitcher wearing a white baseball uniform and the number 63.", "type": "camera", "weight": 3 }, { "content": "The pitcher is standing on the pitcher's mound wearing a blue baseball cap.", "type": "attribute", "weight": 2 }, { "content": "The pitcher raises his left leg to prepare for pitching.", "type": "action", "weight": 3 }, { "content": "In front of the pitcher is the referee wearing a black short-sleeved shirt, gray trousers, a black cap and his hands on his waist.", "type": "attribute", "weight": 2 }, { "content": "There are some spectators in the blue audience seats in the background.", "type": "scene", "weight": 2 }, { "content": "Under the audience seats sit substitutes wearing white or dark clothes.", "type": "attribute", "weight": 1 } ] }, { "event": "The pitcher starts to pitch, his left leg is arched, his right toe is raised, his body is leaning forward, his left hand wearing a baseball glove is backward, and his right hand holds the baseball and throws it hard. The camera moves to the left with the baseball thrown. On the left side of the picture are two batters wearing black baseball uniforms, black hats and white gloves. They stand in front of the home plate and prepare to hit the ball but miss. Behind the batter is a defender wearing a white baseball uniform and a blue helmet. The defender's left hand wearing a glove catches the flying baseball.", "visual_elements": [ { "content": "The pitcher begins to pitch.", "type": "action", "weight": 3 }, { "content": "The pitcher's left leg is arched, his right toe is raised, and his body is leaning forward.", "type": "action", "weight": 3 }, { "content": "The pitcher's left hand wearing a baseball glove is backward, and his right hand holds the baseball and throws it hard.", "type": "action", "weight": 3 }, { "content": "The camera moves to the left with the baseball thrown.", "type": "camera", "weight": 3 }, { "content": "On the left side of the picture are two batters wearing black baseball uniforms, black hats and white gloves.", "type": "attribute", "weight": 2 }, { "content": "The two stand in front of the home plate and prepare to hit the ball but miss.", "type": "action", "weight": 3 }, { "content": "Behind the batter is the fielder wearing a white baseball uniform and a blue helmet.", "type": "attribute", "weight": 2 }, { "content": "The fielder's left hand wearing a glove catches the flying baseball.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0735", "video_path": "VIDGEN-1M/8fWbTuipSIA-Scene-0076.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 7.87, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a middle-aged man standing in front of a garage door. The man is wearing a white long-sleeved shirt and light-colored pants. The man is holding a large white gas tank with a white balloon attached to the valve. The man is untying the balloon from the valve. On the left side of the screen, there are various tools and equipment inside the garage, including orange cables hanging on the wall, tool boxes on shelves, and some spray cans. The walls of the garage are made of red brick. 
On the right side of the screen, there are some trees with green leaves growing out. There are several parked cars in the distance, one of which is a red and black bus. There are green and gray garbage cans on the ground.\nThe camera moves to the upper right and then remains fixed at an angle. The man removes the balloon from the valve and begins to adjust the balloon with his hands.", "events": [ { "event": "The video begins with the camera focusing on a middle-aged man standing in front of a garage door. The man is wearing a white long-sleeved shirt and light-colored pants. The man is holding a large white gas tank with a white balloon attached to the valve. The man is untying the balloon from the valve. On the left side of the screen, there are various tools and equipment inside the garage, including orange cables hanging on the wall, tool boxes on shelves, and some spray cans. The walls of the garage are made of red brick. On the right side of the screen, there are some trees with green leaves growing out. There are several parked cars in the distance, one of which is a red and black bus. There are green and gray garbage cans on the ground.", "visual_elements": [ { "content": "The camera is focused on a middle-aged man standing at the door of a garage.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a white long-sleeved shirt and light-colored pants.", "type": "attribute", "weight": 2 }, { "content": "The man is holding a large white gas tank.", "type": "attribute", "weight": 2 }, { "content": "A white balloon is attached to the tank.", "type": "attribute", "weight": 2 }, { "content": "The man is untying the balloon from the valve.", "type": "action", "weight": 3 }, { "content": "The interior of the garage on the left side of the screen has various tools and equipment, including orange cables hanging on the wall, tool boxes on the shelf, and some spray cans.", "type": "scene", "weight": 2 }, { "content": "The walls of the garage are made of red bricks.", "type": "scene", "weight": 1 }, { "content": "There are some trees on the right side of the screen, and the green leaves of the trees have grown.", "type": "scene", "weight": 2 }, { "content": "There are several parked cars in the distance.", "type": "scene", "weight": 2 }, { "content": "There is a red and black bus in the distance.", "type": "scene", "weight": 1 }, { "content": "There are green and gray trash cans on the ground.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves to the upper right and then remains fixed at an angle. 
The man removes the balloon from the valve and begins to adjust the balloon with his hands.", "visual_elements": [ { "content": "The camera moves to the upper right and then fixes at an angle.", "type": "camera", "weight": 3 }, { "content": "The man takes the balloon off the valve and begins to adjust the balloon with both hands.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0736", "video_path": "VIDGEN-1M/9Ae9bU0R3Ws-Scene-0069.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 13.48, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a dark sea, the water fluctuating slightly.\nThe camera moves upwards, and more waves appear in the frame, white waves rolling over the sea, with a small island in the distance, in a blurred image.", "events": [ { "event": "The video begins with the camera focusing on a dark sea, the water fluctuating slightly.", "visual_elements": [ { "content": "The camera focuses on a dark sea.", "type": "camera", "weight": 3 }, { "content": "The sea is fluctuating slightly.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves upwards, and more waves appear in the frame, white waves rolling over the sea, with a small island in the distance, in a blurred image.", "visual_elements": [ { "content": "The camera moves upwards.", "type": "camera", "weight": 3 }, { "content": "More waves appear in the frame.", "type": "scene", "weight": 2 }, { "content": "White waves roll over the surface of the sea.", "type": "scene", "weight": 2 }, { "content": "In the distance is a small island.", "type": "scene", "weight": 2 }, { "content": "The frame is blurred.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 7 }, { "index": "TUNA_0737", "video_path": "VIDGEN-1M/9ED9kMGgY4w-Scene-0038.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 5.47, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, there are two male characters in the scene. The man facing away from the camera wears a gray baseball uniform with the words \"29 SAMARDZIJA\" printed on the back. He is wearing a black baseball cap and has long hair. The man facing the man with long hair is wearing black and orange protective gear and a matching black and orange helmet. In the top left corner of the screen, the score is displayed, showing that the SF team has defeated the TB team with a score of 5 to 1, and the game has ended (FINAL). The background features green grass and red soil.\nAt the beginning of the video, the man with long hair turns to his right, revealing that the front of his baseball uniform has the words \"SAN FRANCISCO\" printed on it. The man wearing protective gear taps the man with long hair on the lower back, and both of them start walking forward while maintaining a conversational posture. The man in protective gear uses his right hand to remove his helmet and holds it under his left arm. The camera pans their movement smoothly to the right. As they walk, the proportion of the red soil in the background increases, with green grass still surrounding it.", "events": [ { "event": "At the beginning of the video, there are two male characters in the scene. The man facing away from the camera wears a gray baseball uniform with the words \"29 SAMARDZIJA\" printed on the back. 
He is wearing a black baseball cap and has long hair. The man facing the man with long hair is wearing black and orange protective gear and a matching black and orange helmet. In the top left corner of the screen, the score is displayed, showing that the SF team has defeated the TB team with a score of 5 to 1, and the game has ended (FINAL). The background features green grass and red soil.", "visual_elements": [ { "content": "There are two male characters in the scene.", "type": "attribute", "weight": 3 }, { "content": "The man facing away from the camera wears a gray baseball uniform.", "type": "attribute", "weight": 3 }, { "content": "The gray baseball uniform has the words \"29 SAMARDZIJA\" printed on the back.", "type": "attribute", "weight": 2 }, { "content": "The man facing away from the camera is wearing a black baseball cap and has long hair.", "type": "attribute", "weight": 2 }, { "content": "The man facing the man with long hair is wearing black and orange protective gear.", "type": "attribute", "weight": 3 }, { "content": "The man facing the man with long hair is wearing a black and orange helmet.", "type": "attribute", "weight": 3 }, { "content": "In the top left corner of the screen, the score is displayed, showing that the SF team has defeated the TB team with a score of 5 to 1, and the game has ended (FINAL).", "type": "scene", "weight": 1 }, { "content": "The background features green grass and red soil.", "type": "scene", "weight": 2 } ] }, { "event": "At the beginning of the video, the man with long hair turns to his right, revealing that the front of his baseball uniform has the words \"SAN FRANCISCO\" printed on it. The man wearing protective gear taps the man with long hair on the lower back, and both of them start walking forward while maintaining a conversational posture. The man in protective gear uses his right hand to remove his helmet and holds it under his left arm. The camera pans their movement smoothly to the right. As they walk, the proportion of the red soil in the background increases, with green grass still surrounding it.", "visual_elements": [ { "content": "The man with long hair turns to his right.", "type": "action", "weight": 2 }, { "content": "The front of the long-haired man's baseball uniform has the words \"SAN FRANCISCO\" printed on it.", "type": "attribute", "weight": 2 }, { "content": "The man wearing protective gear taps the man with long hair on the lower back.", "type": "action", "weight": 3 }, { "content": "Both of them start walking forward.", "type": "action", "weight": 3 }, { "content": "Both of them maintain a conversational posture.", "type": "action", "weight": 3 }, { "content": "The man in protective gear uses his right hand to remove his helmet and holds it under his left arm.", "type": "action", "weight": 3 }, { "content": "The camera pans their movement smoothly to the right.", "type": "camera", "weight": 3 }, { "content": "The proportion of the red soil in the background increases.", "type": "scene", "weight": 2 }, { "content": "The green grass still surrounds the red soil.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0738", "video_path": "VIDGEN-1M/9HDH1wbIi2o-Scene-0046.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 9.21, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera looks down at the basketball court from above. 
In the distance, there is a player in a white jersey and a player in a black jersey. The player in the white jersey is jumping up to shoot. The basketball hits the rim. There is black lettering reading \"CLIPPERS.COM\" on the top of the backboard. A player in a white jersey is defending a player in a black jersey on the left side of the basket. During the defense and attack, the player in the white jersey presses the player in the black jersey to the blue ground.\nThe basketball from the missed shot is scrambled for by everyone. The camera moves up. Finally, the player in the black jersey with the number 11 gets the basketball. He dribbles the basketball and leaves the blue ground. The remaining players and referees are watching the two people who fell to the ground due to the dispute. The audience seats in the background are full of spectators. The colors of this basketball court include yellow, beige, blue and red. There are also obvious basketball court signs and billboards on the court.", "events": [ { "event": "At the beginning of the video, the camera looks down at the basketball court from above. In the distance, there is a player in a white jersey and a player in a black jersey. The player in the white jersey is jumping up to shoot. The basketball hits the rim. There is black lettering reading \"CLIPPERS.COM\" on the top of the backboard. A player in a white jersey is defending a player in a black jersey on the left side of the basket. During the defense and attack, the player in the white jersey presses the player in the black jersey to the blue ground.", "visual_elements": [ { "content": "The camera looks down at the basketball court from above.", "type": "camera", "weight": 3 }, { "content": "There is a player in a white jersey and a player in a black jersey in the distance.", "type": "attribute", "weight": 2 }, { "content": "The player in the white jersey is jumping up to shoot.", "type": "action", "weight": 3 }, { "content": "The basketball hits the rim.", "type": "attribute", "weight": 2 }, { "content": "There is black lettering reading \"CLIPPERS.COM\" on the top of the backboard.", "type": "attribute", "weight": 1 }, { "content": "A player in a white jersey is defending a player in a black jersey on the left side of the basket.", "type": "attribute", "weight": 2 }, { "content": "During defense and offense, the player in the white jersey presses the player in the black jersey down on the blue ground.", "type": "action", "weight": 3 } ] }, { "event": "The basketball from the missed shot is scrambled for by everyone. The camera moves up. Finally, the player in the black jersey with the number 11 gets the basketball. He dribbles the basketball and leaves the blue ground. The remaining players and referees are watching the two people who fell to the ground due to the dispute. The audience seats in the background are full of spectators. The colors of this basketball court include yellow, beige, blue and red. 
There are also obvious basketball court signs and billboards on the court.", "visual_elements": [ { "content": "The basketball from the missed shot is scrambled for by everyone.", "type": "action", "weight": 3 }, { "content": "The camera moves up.", "type": "camera", "weight": 3 }, { "content": "Finally, the player in the black jersey with the number 11 gets the basketball.", "type": "attribute", "weight": 3 }, { "content": "Number 11 dribbles the basketball and leaves the blue ground.", "type": "action", "weight": 3 }, { "content": "The remaining players and referees are watching the two people who fell to the ground due to the dispute.", "type": "action", "weight": 2 }, { "content": "The audience seats in the background are full of spectators.", "type": "scene", "weight": 2 }, { "content": "The colors of this basketball court include yellow, beige, blue and red.", "type": "attribute", "weight": 2 }, { "content": "There are also obvious basketball court signs and billboards on the court.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0739", "video_path": "VIDGEN-1M/9LoulrG3X8A-Scene-0031.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 11.72, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on two young white men in front of a row of slot machines. In front of the slot machines, the man on the left has short light brown hair and wears a light-colored T-shirt. The man on the right has short dark brown hair and wears a dark T-shirt. The man in the light-colored T-shirt is mainly responsible for operating the buttons, while the man in the dark T-shirt observes from the side. The screen of the slot machine displays various colorful patterns and numbers. The background wall is yellow, with some decorative paintings and logos on the wall. There are several bright lights on the ceiling, illuminating the entire scene. There is a small fan on the ground. The video is blurry at the beginning.\nThe camera moves slightly to the right, and the video becomes clear. The camera is shaking throughout the video. The man in light clothes presses the button with his right hand, while the man in dark clothes observes and smiles while talking, occasionally turning to the slot machine opposite. On the right side of the screen, a man in black clothes appears, pressing the button of the slot machine.", "events": [ { "event": "At the beginning of the video, the camera is focused on two young white men in front of a row of slot machines. In front of the slot machines, the man on the left has short light brown hair and wears a light-colored T-shirt. The man on the right has short dark brown hair and wears a dark T-shirt. The man in the light-colored T-shirt is mainly responsible for operating the buttons, while the man in the dark T-shirt observes from the side. The screen of the slot machine displays various colorful patterns and numbers. The background wall is yellow, with some decorative paintings and logos on the wall. There are several bright lights on the ceiling, illuminating the entire scene. There is a small fan on the ground. 
The video is blurry at the beginning.", "visual_elements": [ { "content": "The camera is pointed at two young white men in front of a row of slot machines.", "type": "camera", "weight": 3 }, { "content": "In front of the slot machines, the man on the left has short light brown hair and wears a light-colored T-shirt.", "type": "attribute", "weight": 2 }, { "content": "The man on the right has short dark brown hair and wears a dark-colored T-shirt.", "type": "attribute", "weight": 2 }, { "content": "The man in the light-colored T-shirt is mainly responsible for key operation, while the man in the dark-colored T-shirt observes from the side.", "type": "action", "weight": 3 }, { "content": "The screen of the slot machine displays various colorful patterns and numbers.", "type": "scene", "weight": 2 }, { "content": "The background wall is yellow.", "type": "scene", "weight": 2 }, { "content": "There are some decorative paintings and logos on the wall.", "type": "scene", "weight": 1 }, { "content": "There are several bright lights on the ceiling, illuminating the entire scene.", "type": "scene", "weight": 1 }, { "content": "There is a small fan on the ground.", "type": "scene", "weight": 1 }, { "content": "The video is blurry at the beginning.", "type": "camera", "weight": 2 } ] }, { "event": "The camera moves slightly to the right, and the video becomes clear. The camera is shaking throughout the video. The man in light clothes presses the button with his right hand, while the man in dark clothes observes and smiles while talking, occasionally turning to the slot machine opposite. On the right side of the screen, a man in black clothes appears, pressing the button of the slot machine.", "visual_elements": [ { "content": "The camera moves slightly to the right, and the video becomes clearer.", "type": "camera", "weight": 3 }, { "content": "The camera is shaking throughout the video.", "type": "camera", "weight": 3 }, { "content": "The man in light-colored clothes presses the button with his right hand.", "type": "action", "weight": 2 }, { "content": "The man in dark-colored clothes observes and smiles and talks.", "type": "action", "weight": 2 }, { "content": "The man in dark-colored clothes occasionally turns to the slot machine opposite.", "type": "action", "weight": 1 }, { "content": "The hand of a man in black clothes appears on the right side of the screen, pressing the button of the slot machine.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0740", "video_path": "VIDGEN-1M/9PwQs0muT24-Scene-0027.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 9.48, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, there are four people playing football on the grass. A long-haired woman in a purple top and black pants stands on the left side of the screen. Two little boys in short-sleeved shirts and shorts are in the middle. One boy wears a white top and army green shorts, and the other boy wears a gray top and denim shorts. A man in a black top and khaki shorts stands on the right side.\nThe woman kicks the blue ball from the boy in the white short-sleeved shirt to the man, who then kicks the ball to the woman. The two little boys try to grab the ball between the man and the woman but fail. 
The boy in the gray short-sleeved shirt falls on the green grass.\nThe camera zooms in, and the woman kicks the ball to avoid the boy in the white short-sleeved shirt. The boy who falls slowly stands up facing the left side of the screen. The background is a light-colored house with a wooden balcony next to it, and there are some trees and other houses in the distance.", "events": [ { "event": "At the beginning of the video, there are four people playing football on the grass. A long-haired woman in a purple top and black pants stands on the left side of the screen. Two little boys in short-sleeved shirts and shorts are in the middle. One boy wears a white top and army green shorts, and the other boy wears a gray top and denim shorts. A man in a black top and khaki shorts stands on the right side.", "visual_elements": [ { "content": "There are four people playing football on the grass in the picture.", "type": "action", "weight": 3 }, { "content": "A long-haired woman wearing a purple top and black pants stands on the left side of the picture.", "type": "attribute", "weight": 3 }, { "content": "Two little boys wearing short-sleeved shirts and shorts are in the middle.", "type": "attribute", "weight": 3 }, { "content": "One boy wears a white top and army green shorts.", "type": "attribute", "weight": 3 }, { "content": "Another boy wears a gray top and denim shorts.", "type": "attribute", "weight": 3 }, { "content": "A man wearing a black top and khaki shorts stands on the right.", "type": "attribute", "weight": 3 } ] }, { "event": "The woman kicks the blue ball from the boy in the white short-sleeved shirt to the man, who then kicks the ball to the woman. The two little boys try to grab the ball between the man and the woman but fail. The boy in the gray short-sleeved shirt falls on the green grass.", "visual_elements": [ { "content": "The woman kicks the blue ball from the boy wearing a white short-sleeved shirt to the man.", "type": "action", "weight": 3 }, { "content": "The man then kicks the ball to the woman.", "type": "action", "weight": 3 }, { "content": "Two little boys try to grab the ball between the man and the woman but fail.", "type": "action", "weight": 2 }, { "content": "The boy wearing a gray short-sleeved shirt falls on the green grass.", "type": "action", "weight": 3 } ] }, { "event": "The camera zooms in, and the woman kicks the ball to avoid the boy in the white short-sleeved shirt. The boy who falls slowly stands up facing the left side of the screen. 
The background is a light-colored house with a wooden balcony next to it, and there are some trees and other houses in the distance.", "visual_elements": [ { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The woman kicks the ball to avoid the boy wearing a white short-sleeved shirt who tries to grab the ball.", "type": "action", "weight": 3 }, { "content": "The boy who fell slowly stands up facing the left side of the picture.", "type": "action", "weight": 2 }, { "content": "The background is a light-colored house.", "type": "scene", "weight": 2 }, { "content": "There is a wooden balcony next to the house.", "type": "scene", "weight": 2 }, { "content": "There are some trees and other houses in the distance.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0741", "video_path": "VIDGEN-1M/9gJqMMMthlg-Scene-0011.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 8.74, "resolution": { "width": 1280, "height": 720 }, "caption": "In this video, the ice hockey game is going on fiercely. There are mainly two players in the picture. First, the player in the blue jersey (back number 97) and the player in the white jersey (back number 14) are skating at high speed on the ice rink. The player in the blue jersey holds the ball, and the player in the white jersey follows closely to block it.\nThe camera moves to the left with the two people, and a goalkeeper wearing a white jersey and protective tools can be seen on the right side of the screen. When approaching the opponent's goal, the player in the blue jersey turns left, and the player in the white jersey (back number 14) collides with the goalkeeper of the same team and falls to the ground, and the goal is turned 90 degrees to the left. The camera zooms in, and the player in the blue jersey (back number 97) celebrates with his left hand after scoring a goal. At the end of the video, another player in white enters the picture from the right side of the picture to merge with the two white players who fell. There are the boundary lines and billboards of the ice rink in the background.", "events": [ { "event": "In this video, the ice hockey game is going on fiercely. There are mainly two players in the picture. First, the player in the blue jersey (back number 97) and the player in the white jersey (back number 14) are skating at high speed on the ice rink. The player in the blue jersey holds the ball, and the player in the white jersey follows closely to block it.", "visual_elements": [ { "content": "The ice hockey game is going on fiercely.", "type": "attribute", "weight": 3 }, { "content": "There are mainly two players in the picture.", "type": "attribute", "weight": 2 }, { "content": "The player in the blue jersey (back number 97) and the player in the white jersey (back number 14) are skating at high speed on the ice rink.", "type": "action", "weight": 3 }, { "content": "The player in the blue jersey holds the ball.", "type": "action", "weight": 3 }, { "content": "The player in the white jersey follows closely to block it.", "type": "action", "weight": 3 } ] }, { "event": "The camera moves to the left with the two people, and a goalkeeper wearing a white jersey and protective tools can be seen on the right side of the screen. 
When approaching the opponent's goal, the player in the blue jersey turns left, and the player in the white jersey (back number 14) collides with the goalkeeper of the same team and falls to the ground, and the goal is turned 90 degrees to the left. The camera zooms in, and the player in the blue jersey (back number 97) celebrates with his left hand after scoring a goal. At the end of the video, another player in white enters the picture from the right side of the picture to join the two white players who fell. The boundary lines and billboards of the ice rink are in the background.", "visual_elements": [ { "content": "The camera moves to the left with the two people.", "type": "camera", "weight": 3 }, { "content": "A goalkeeper wearing a white jersey and protective tools can be seen on the right side of the screen.", "type": "attribute", "weight": 2 }, { "content": "When approaching the opponent's goal, the player in the blue jersey turns left.", "type": "action", "weight": 3 }, { "content": "The player in the white jersey (back number 14) collides with the goalkeeper of the same team and falls to the ground.", "type": "action", "weight": 3 }, { "content": "The goal is turned 90 degrees to the left.", "type": "action", "weight": 2 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The player in the blue jersey (back number 97) celebrates with his left hand after scoring a goal.", "type": "action", "weight": 3 }, { "content": "At the end of the video, another player in white enters the picture from the right side of the picture to join the two white players who fell.", "type": "action", "weight": 3 }, { "content": "The boundary lines and billboards of the ice rink are in the background.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0742", "video_path": "VIDGEN-1M/9ifE87aOfLQ-Scene-0001.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 4.03, "resolution": { "width": 1280, "height": 720 }, "caption": "At the start of the video, a young male boxer appears on the scene. He wears a red sleeveless top and white boxing shorts with red and green stripes, and a belt with the word \"TITLE\" written on it. He wears red boxing gloves. Standing opposite the boxer is a middle-aged male dressed in a red long-sleeved shirt, who wears boxing guards on his hands. In the background, a large indoor gymnasium can be seen, with iron railings separating the venue from the surrounding area. On the left side of the venue are bleachers where spectators are seated, watching the match. On the right side of the venue, several boxers and coaches dressed in boxing attire of different colors prepare or wait for their matches.\nIn the video, the boxer continuously performs boxing moves, punching, dodging, and defending. He gradually moves from the left side of the scene to the right, while the middle-aged male opposite him retreats while using his boxing guards to engage in sparring training with the boxer. Behind the boxer and the middle-aged male, a male dressed in a black T-shirt and khaki cropped pants leans against the iron railings to watch the training. Meanwhile, another black male dressed in a black tracksuit, with a white towel draped around his neck, walks from the right side of the screen to the left while talking on the phone.", "events": [ { "event": "At the start of the video, a young male boxer appears on the scene. 
He wears a red sleeveless top and white boxing shorts with red and green stripes, and a belt with the word \"TITLE\" written on it. He wears red boxing gloves. Standing opposite the boxer is a middle-aged male dressed in a red long-sleeved shirt, who wears boxing guards on his hands. In the background, a large indoor gymnasium can be seen, with iron railings separating the venue from the surrounding area. On the left side of the venue are bleachers where spectators are seated, watching the match. On the right side of the venue, several boxers and coaches dressed in boxing attire of different colors prepare or wait for their matches.", "visual_elements": [ { "content": "A young male boxer appears on the scene.", "type": "attribute", "weight": 3 }, { "content": "The boxer wears a red sleeveless top and white boxing shorts with red and green stripes.", "type": "attribute", "weight": 3 }, { "content": "A belt with the word \"TITLE\" written on it is worn over the boxer's shorts.", "type": "attribute", "weight": 2 }, { "content": "The boxer wears red boxing gloves.", "type": "attribute", "weight": 3 }, { "content": "Standing opposite the boxer is a middle-aged male dressed in a red long-sleeved shirt.", "type": "attribute", "weight": 3 }, { "content": "The middle-aged male wears boxing guards on his hands.", "type": "attribute", "weight": 3 }, { "content": "In the background, a large indoor gymnasium can be seen.", "type": "scene", "weight": 3 }, { "content": "The venue is surrounded by iron railings that separate it from the surrounding area.", "type": "scene", "weight": 2 }, { "content": "On the left side of the venue are bleachers where spectators are seated.", "type": "scene", "weight": 2 }, { "content": "The spectators are seated in their seats watching the game.", "type": "scene", "weight": 2 }, { "content": "On the right side of the venue, several boxers and coaches dressed in boxing attire of different colors are preparing or waiting for their matches.", "type": "scene", "weight": 2 } ] }, { "event": "In the video, the boxer continuously performs boxing moves, punching, dodging, and defending. He gradually moves from the left side of the scene to the right, while the middle-aged male opposite him retreats while using his boxing guards to engage in sparring training with the boxer. Behind the boxer and the middle-aged male, a male dressed in a black T-shirt and khaki cropped pants leans against the iron railings to watch the training. 
Meanwhile, another black male dressed in a black tracksuit, with a white towel draped around his neck, walks from the right side of the screen to the left while talking on the phone.", "visual_elements": [ { "content": "The boxer continuously performs boxing moves, punching, dodging, and defending.", "type": "action", "weight": 3 }, { "content": "The boxer gradually moves from the left side of the scene to the right.", "type": "action", "weight": 3 }, { "content": "The middle-aged male opposite him retreats while using his boxing guards to engage in sparring training with the boxer.", "type": "action", "weight": 3 }, { "content": "Behind the boxer and the middle-aged male, a male dressed in a black T-shirt and khaki cropped pants leans against the iron railings to watch the training.", "type": "attribute", "weight": 2 }, { "content": "A male dressed in a black T-shirt leans against the iron railings to watch the boxing training.", "type": "attribute", "weight": 2 }, { "content": "Another black male is dressed in a black tracksuit, with a white towel draped around his neck.", "type": "attribute", "weight": 2 }, { "content": "The black male walks from the right side of the screen to the left while talking on the phone.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 18 }, { "index": "TUNA_0743", "video_path": "VIDGEN-1M/9p9LAJPTKO4-Scene-0004.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 11.37, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a first-person shooter game perspective, with the gunman's arm and gun remaining in the lower right corner of the screen at all times, with the muzzle pointing forward. In front of the screen is an open grassy field with some trees and bushes in the distance, and the sky is overcast.\nThe camera keeps moving to the right with pauses and the shooter keeps aiming. The camera pauses for a moment and then begins to move to the left.", "events": [ { "event": "The video begins with a first-person shooter game perspective, with the gunman's arm and gun remaining in the lower right corner of the screen at all times, with the muzzle pointing forward. In front of the screen is an open grassy field with some trees and bushes in the distance, and the sky is overcast.", "visual_elements": [ { "content": "The scene is a first-person view of the game's shooter.", "type": "scene", "weight": 3 }, { "content": "The gunman's arm and gun remain in the lower right corner of the screen.", "type": "action", "weight": 3 }, { "content": "In front of the screen is an open grassy field.", "type": "scene", "weight": 2 }, { "content": "There are some trees and bushes in the distance.", "type": "scene", "weight": 2 }, { "content": "The sky is overcast.", "type": "scene", "weight": 2 } ] }, { "event": "The camera keeps moving to the right with pauses and the shooter keeps aiming. 
The camera pauses for a moment and then begins to move to the left.", "visual_elements": [ { "content": "The camera keeps moving to the right with pauses.", "type": "scene", "weight": 1 }, { "content": "The shooter keeps aiming.", "type": "action", "weight": 3 }, { "content": "The camera pauses for a moment and then begins to move to the left.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0744", "video_path": "VIDGEN-1M/A8pqHetEVVs-Scene-0027.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Scene", "domain": "Sports Activity", "duration": 16.98, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera perspective is from a cyclist, with another cyclist riding ahead. The cyclist in front wears a dark-colored long-sleeved shirt and dark pants, and rides a blue mountain bike. The background is a dark forest, with only the bicycle lights illuminating the path ahead.\nThe camera pans with the movements of the cyclist, bouncing up and down as they ride. The cyclist first turns right and then continues straight ahead. As they ride along the rugged mountain trail, the camera stays close behind, capturing the beam of the bicycle lights moving across the ground ahead. The lights illuminate stones and tree roots on the path, while tall trees surround the riders on all sides.\nAs the cyclist in front makes a left turn, the camera follows suit, rotating to the left. The cyclist then dives down a slope and then pedals up an incline, eventually disappearing from the scene. The camera pans downwards as it enters the downhill section and then upwards as it ascends the incline. After completing the climb, the camera rotates to the right, and the video comes to an end.", "events": [ { "event": "At the beginning of the video, the camera perspective is from a cyclist, with another cyclist riding ahead. The cyclist in front wears a dark-colored long-sleeved shirt and dark pants, and rides a blue mountain bike. The background is a dark forest, with only the bicycle lights illuminating the path ahead.", "visual_elements": [ { "content": "The camera perspective is from a cyclist.", "type": "camera", "weight": 3 }, { "content": "Another cyclist is riding ahead.", "type": "attribute", "weight": 3 }, { "content": "The cyclist in front wears a dark-colored long-sleeved shirt and dark pants.", "type": "attribute", "weight": 2 }, { "content": "The cyclist rides a blue mountain bike.", "type": "attribute", "weight": 2 }, { "content": "The background is a dark forest.", "type": "scene", "weight": 2 }, { "content": "Only the bicycle lights illuminate the path ahead.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans with the movements of the cyclist, bouncing up and down as they ride. The cyclist first turns right and then continues straight ahead. As they ride along the rugged mountain trail, the camera stays close behind, capturing the beam of the bicycle lights moving across the ground ahead. 
The lights illuminate stones and tree roots on the path, while tall trees surround the riders on all sides.", "visual_elements": [ { "content": "The camera pans with the movements of the cyclist, bouncing up and down as they ride.", "type": "camera", "weight": 3 }, { "content": "The cyclist first turns right and then continues straight ahead.", "type": "camera", "weight": 3 }, { "content": "The cyclist in front is riding along a rugged mountain trail.", "type": "action", "weight": 3 }, { "content": "The lights illuminate stones and tree roots on the path.", "type": "scene", "weight": 2 }, { "content": "Tall trees surround the riders on all sides.", "type": "scene", "weight": 2 } ] }, { "event": "As the cyclist in front makes a left turn, the camera follows suit, rotating to the left. The cyclist then dives down a slope and then pedals up an incline, eventually disappearing from the scene. The camera pans downwards as it enters the downhill section and then upwards as it ascends the incline. After completing the climb, the camera rotates to the right, and the video comes to an end.", "visual_elements": [ { "content": "The cyclist in front makes a left turn.", "type": "action", "weight": 3 }, { "content": "The camera follows suit, rotating to the left.", "type": "camera", "weight": 3 }, { "content": "The cyclist then dives down a slope and then pedals up an incline, eventually disappearing from the scene.", "type": "action", "weight": 3 }, { "content": "The camera pans downwards as it enters the downhill section.", "type": "camera", "weight": 3 }, { "content": "The camera pans upwards as it ascends the incline.", "type": "camera", "weight": 3 }, { "content": "After completing the climb, the camera rotates to the right, and the video comes to an end.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0745", "video_path": "VIDGEN-1M/A9C97Y6zRbI-Scene-0405.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 4.8, "resolution": { "width": 1280, "height": 720 }, "caption": "The video is an animated film. At the beginning of the video, the camera is focused on a large brown bear standing indoors. The bear is large and fluffy, with dark brown as the main color and a white belly. The bear has distinct white facial features, a large black nose, and bright round eyes. The bear holds a blue magic wand in its left hand, with a golden star decoration on the top. 
The background is a curtain with a red checkered pattern, which is illuminated by soft lights. Behind the bear is a row of bookshelves filled with books of different colors and arrangements.", "visual_elements": [ { "content": "The video is an animation.", "type": "camera", "weight": 3 }, { "content": "The camera is aimed at a big brown bear standing indoors.", "type": "camera", "weight": 3 }, { "content": "The bear is large, with fluffy hair, mainly dark brown in color, and white on the belly.", "type": "attribute", "weight": 3 }, { "content": "The white facial features of the bear are obvious, with a big black nose and bright round eyes.", "type": "attribute", "weight": 2 }, { "content": "The big bear holds a blue magic wand in his left hand.", "type": "action", "weight": 3 }, { "content": "There is a golden star-shaped decoration on the top of the magic wand.", "type": "attribute", "weight": 2 }, { "content": "The background is a curtain with a red checkered pattern.", "type": "scene", "weight": 2 }, { "content": "The curtain is illuminated by soft light.", "type": "scene", "weight": 1 }, { "content": "There is also a row of bookshelves behind the bear, which are full of books.", "type": "scene", "weight": 2 }, { "content": "The books are of different colors and arrangements.", "type": "scene", "weight": 1 } ] }, { "event": "As the video progresses, the camera focuses on the bear's movements. The bear looks down at the magic wand in his left hand and adjusts it with his right hand.", "visual_elements": [ { "content": "During the video, the camera focuses on the bear's movements.", "type": "camera", "weight": 2 }, { "content": "The bear looks down at the magic wand in his left hand and adjusts it with his right hand.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera moves slightly upwards, following the bear's movements. The bear raises the magic wand with his left hand. The tip of the magic wand suddenly glows, and the star-shaped decoration begins to flash. More books appear on top of the bookshelf in the upper left corner of the screen.", "visual_elements": [ { "content": "Finally, the camera moves slightly upward along with the bear's movements.", "type": "camera", "weight": 3 }, { "content": "The bear raises the magic wand with his left hand.", "type": "action", "weight": 3 }, { "content": "The top of the magic wand suddenly emits a bright light, and the star-shaped decoration begins to flash.", "type": "action", "weight": 2 }, { "content": "More books appear on the top of the bookshelf in the upper left of the screen.", "type": "camera", "weight": 1 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0746", "video_path": "VIDGEN-1M/AVMn8SrBd4s-Scene-0138.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 6.2, "resolution": { "width": 1280, "height": 720 }, "caption": "The video is broadcasting the news. At the beginning of the video, the camera is focused on a middle-aged Asian man wearing glasses, a black suit, a white shirt and a red tie. He is standing behind a glass door with Korean words and some colorful patterns printed on it. The indoor scene in the background is blurry and the light is bright. In the lower right corner of the picture, there is a woman in a black shirt doing sign language.\nThe camera slowly moves to the left, and the left side of the glass door gradually enters the frame. 
The man pushes the metal handle of the glass door with his right hand and walks out.", "events": [ { "event": "The video is broadcasting the news. At the beginning of the video, the camera is focused on a middle-aged Asian man wearing glasses, a black suit, a white shirt and a red tie. He is standing behind a glass door with Korean words and some colorful patterns printed on it. The indoor scene in the background is blurry and the light is bright. In the lower right corner of the picture, there is a woman in a black shirt doing sign language.", "visual_elements": [ { "content": "The video is broadcasting the news.", "type": "camera", "weight": 3 }, { "content": "The camera is focused on a middle-aged Asian man.", "type": "camera", "weight": 3 }, { "content": "The man is wearing glasses, a black suit, a white shirt and a red tie.", "type": "attribute", "weight": 3 }, { "content": "The man is standing behind a glass door.", "type": "action", "weight": 3 }, { "content": "The glass door is printed with Korean words and some colorful patterns.", "type": "attribute", "weight": 2 }, { "content": "The indoor scene in the background is blurry and the light is bright.", "type": "scene", "weight": 3 }, { "content": "There is a woman in a black shirt doing sign language in the lower right corner of the picture.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera slowly moves to the left, and the left side of the glass door gradually enters the frame. The man pushes the metal handle of the glass door with his right hand and walks out.", "visual_elements": [ { "content": "The camera moves slowly to the left.", "type": "camera", "weight": 3 }, { "content": "The left side of the glass door gradually enters the picture.", "type": "attribute", "weight": 1 }, { "content": "The man pushes the metal handle of the glass door with his right hand and walks out.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0747", "video_path": "VIDGEN-1M/AdJhAy708w8-Scene-0015.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 17.64, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a young white woman standing on the street. The woman is wearing a pink helmet, a blue undershirt, a light blue denim jacket, and light blue jeans. The woman's black hair is braided and tied with a green headband. She stands in the middle of the street with her hands on her hips and speaks to the camera. The background is a quiet residential street under a white sky. There are some single-family houses on both sides of the street, with lawns and flower beds in front of the houses. The house on the left side of the picture is blue. White and black SUVs are parked on both sides of the street.\nThen, the camera moves slightly to the left. The camera maintains a frontal view of the woman. The woman gestures as she speaks, with her left hand on her waist and her right hand raised.\nFinally, the camera starts to move backwards following the woman's movements. The woman walks forward while talking. The woman's gestures keep changing. Her left hand drops from her waist and both hands start to gesture in front of her chest. The background is a straight and neat street. The camera keeps shaking throughout the video.", "events": [ { "event": "The video begins with the camera focusing on a young white woman standing on the street. 
The woman is wearing a pink helmet, a blue undershirt, a light blue denim jacket, and light blue jeans. The woman's black hair is braided and tied with a green headband. She stands in the middle of the street with her hands on her hips and speaks to the camera. The background is a quiet residential street under a white sky. There are some single-family houses on both sides of the street, with lawns and flower beds in front of the houses. The house on the left side of the picture is blue. White and black SUVs are parked on both sides of the street.", "visual_elements": [ { "content": "The camera is focused on a young white woman standing on the street.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a pink helmet, a blue shirt, a light blue denim jacket, and light blue jeans.", "type": "attribute", "weight": 2 }, { "content": "The woman's black hair is braided and tied with a green headband.", "type": "attribute", "weight": 1 }, { "content": "The woman stands in the middle of the street with her hands on her hips and speaks to the camera.", "type": "action", "weight": 3 }, { "content": "The background is a quiet residential street.", "type": "scene", "weight": 3 }, { "content": "The sky is white.", "type": "scene", "weight": 1 }, { "content": "There are some single-family houses on both sides of the street.", "type": "scene", "weight": 2 }, { "content": "There are lawns and flower beds in front of the houses.", "type": "scene", "weight": 1 }, { "content": "The house on the left side of the picture is blue.", "type": "scene", "weight": 1 }, { "content": "The street is lined with white and black SUVs.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera moves slightly to the left. The camera maintains a frontal view of the woman. The woman gestures as she speaks, with her left hand on her waist and her right hand raised.", "visual_elements": [ { "content": "Then, the camera moves slightly to the left", "type": "camera", "weight": 3 }, { "content": "The camera keeps the front view of the woman", "type": "camera", "weight": 3 }, { "content": "The woman gestures while speaking, with her left hand on her waist and her right hand raised", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera starts to move backwards following the woman's movements. The woman walks forward while talking. The woman's gestures keep changing. Her left hand drops from her waist and both hands start to gesture in front of her chest. The background is a straight and neat street. 
The camera keeps shaking throughout the video.", "visual_elements": [ { "content": "Finally, the camera starts to move backwards with the woman's movements", "type": "camera", "weight": 3 }, { "content": "The woman walks forward while speaking", "type": "action", "weight": 3 }, { "content": "The woman's gestures keep changing", "type": "action", "weight": 2 }, { "content": "The woman's left hand drops from her waist and she starts to gesture with both hands in front of her chest", "type": "action", "weight": 1 }, { "content": "The background is a straight and neat street", "type": "scene", "weight": 2 }, { "content": "The camera keeps shaking throughout the video", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0748", "video_path": "VIDGEN-1M/Aek2M2io0D4-Scene-0046.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 7.91, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a basketball player wearing a white jersey with the words \"CELTICS\" and the number \"11\" printed on his jersey. The lower right corner of the screen shows that the score of DEN is 60 points, BOS is 72 points, the game is in the third quarter, and the time is 10 minutes and 42 seconds.\nNext, the camera switches to a medium shot, in which a player wearing a blue jersey with the words \"HARRIS\" and the number \"27\" printed on it can be seen. The player in the blue jersey No. 27 moves while patting the basketball, and the player in the white jersey No. 11 on the opposite side opens his arms to block him. There are other players and referees in the background who are paying attention to the player No. 27, and the venue is the wooden floor and the green three-point line area of the basketball court. Then, the camera follows the player No. 27, and the player in the blue jersey No. 27 passes the ball to the player No. 14 on the same team. The camera moves to the left with the basketball. After the successful pass, the player No. 27 runs to the left side of the screen, followed by the player in the white jersey No. 11.\nThen the camera begins to focus on the player No. 14 in the blue jersey who gets the basketball. Opposite him is the player No. 7 in the white jersey. The player No. 14 in the blue jersey slaps the basketball vigorously to attack, and the player No. 7 in the white jersey blocks him. Then the player No. 14 breaks through the opponent's defense and comes to the green three-point line area to prepare for a shot. The camera follows the player No. 14 and he is surrounded by the opponent's players in the picture.", "events": [ { "event": "At the beginning of the video, the camera focuses on a basketball player wearing a white jersey with the words \"CELTICS\" and the number \"11\" printed on his jersey. The lower right corner of the screen shows that the score of DEN is 60 points, BOS is 72 points, the game is in the third quarter, and the time is 10 minutes and 42 seconds.", "visual_elements": [ { "content": "The camera focuses on a basketball player wearing a white jersey.", "type": "camera", "weight": 3 }, { "content": "The jersey has the words \"CELTICS\" and the number \"11\" printed on it.", "type": "attribute", "weight": 3 }, { "content": "The lower right corner of the screen shows that the score is 60 for DEN and 72 for BOS. 
The game is in the third quarter and the time is 10 minutes and 42 seconds.", "type": "attribute", "weight": 3 } ] }, { "event": "Next, the camera switches to a medium shot, in which a player wearing a blue jersey with the words \"HARRIS\" and the number \"27\" printed on it can be seen. The player in the blue jersey No. 27 moves while patting the basketball, and the player in the white jersey No. 11 on the opposite side opens his arms to block him. There are other players and referees in the background who are paying attention to the player No. 27, and the venue is the wooden floor and the green three-point line area of the basketball court. Then, the camera follows the player No. 27, and the player in the blue jersey No. 27 passes the ball to the player No. 14 on the same team. The camera moves to the left with the basketball. After the successful pass, the player No. 27 runs to the left side of the screen, followed by the player in the white jersey No. 11.", "visual_elements": [ { "content": "The camera switches to a medium shot.", "type": "camera", "weight": 3 }, { "content": "A player wearing a blue jersey can be seen in the screen.", "type": "attribute", "weight": 2 }, { "content": "The blue jersey has the words \"HARRIS\" and the number \"27\" printed on it.", "type": "attribute", "weight": 2 }, { "content": "The player wearing the blue jersey No. 27 pats the basketball and moves.", "type": "action", "weight": 3 }, { "content": "The player wearing the white jersey No. 11 on the opposite side opens his arms to block.", "type": "action", "weight": 3 }, { "content": "There are other players and referees in the background who are paying attention to the player No. 27.", "type": "scene", "weight": 2 }, { "content": "The court is a basketball court with a wooden floor and a green three-point line area.", "type": "scene", "weight": 2 }, { "content": "The camera moves with player No. 27.", "type": "camera", "weight": 3 }, { "content": "The player in the blue jersey No. 27 passes the ball to the player in the same team No. 14.", "type": "action", "weight": 3 }, { "content": "The camera moves to the left with the basketball.", "type": "camera", "weight": 3 }, { "content": "After successfully passing the ball, the player in the blue jersey No. 27 runs to the left of the screen.", "type": "action", "weight": 2 }, { "content": "The player in the white jersey No. 11 follows closely.", "type": "action", "weight": 1 } ] }, { "event": "Then the camera begins to focus on the player No. 14 in the blue jersey who gets the basketball. Opposite him is the player No. 7 in the white jersey. The player No. 14 in the blue jersey slaps the basketball vigorously to attack, and the player No. 7 in the white jersey blocks him. Then the player No. 14 breaks through the opponent's defense and comes to the green three-point line area to prepare for a shot. The camera follows the player No. 14 and he is surrounded by the opponent's players in the picture.", "visual_elements": [ { "content": "The camera starts to focus on the player in the blue jersey No. 14 who gets the basketball.", "type": "camera", "weight": 3 }, { "content": "Across from the player in the blue jersey No. 14 is the player in the white jersey No. 7.", "type": "attribute", "weight": 2 }, { "content": "The player in the blue jersey No. 14 slaps the basketball vigorously to attack.", "type": "action", "weight": 3 }, { "content": "The player in the white jersey No. 
7 blocks.", "type": "action", "weight": 3 }, { "content": "The player in the blue jersey No. 14 breaks through the opponent's defense and comes to the green three-point line area to prepare for a shot.", "type": "action", "weight": 3 }, { "content": "The camera moves with the player in the blue jersey No. 14.", "type": "camera", "weight": 3 }, { "content": "The player in the screen is surrounded by the opponent's players.", "type": "attribute", "weight": 3 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0749", "video_path": "VIDGEN-1M/AgOI-KnD6Ag-Scene-0051.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 24.32, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a male dressed in a white long-sleeved shirt and khaki pants, with black rollerblades on his feet. He stands at the edge of a skateboarding bowl. The background is an open-air skateboarding park, surrounded by some rocks and guardrails. The overall video quality is blurry.\nThe camera pans slightly upwards, revealing that the male is wearing a black helmet as he bends down to speak towards the camera. He then squats down on the edge of the skateboarding bowl, facing to the right. Subsequently, he glides into the interior of the skateboarding bowl. The interior of the bowl is dimly lit. The camera follows the male's movements by rotating to the right.\nThe camera first zooms out to give a wider view and then zooms in as the male skates out of the skateboarding bowl. When he reaches the edge, he uses his hands to push against the ground and sits down on the edge. Subsequently, he twists his upper body backwards while lifting his legs up with force. Finally, his left foot lands first, followed closely by his right foot. The male bends forward and uses his right hand to support himself on the ground to maintain balance.", "events": [ { "event": "At the beginning of the video, the camera focuses on a male dressed in a white long-sleeved shirt and khaki pants, with black rollerblades on his feet. He stands at the edge of a skateboarding bowl. The background is an open-air skateboarding park, surrounded by some rocks and guardrails. The overall video quality is blurry.", "visual_elements": [ { "content": "The camera focuses on a male dressed in a white long-sleeved shirt and khaki pants.", "type": "camera", "weight": 3 }, { "content": "The man wears black rollerblades on his feet.", "type": "attribute", "weight": 3 }, { "content": "The man stands at the edge of a skateboarding bowl.", "type": "attribute", "weight": 3 }, { "content": "The background is an open-air skateboarding park.", "type": "scene", "weight": 3 }, { "content": "There are some rocks and guardrails around the park.", "type": "scene", "weight": 2 }, { "content": "The overall video quality is blurry.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans slightly upwards, revealing that the male is wearing a black helmet as he bends down to speak towards the camera. He then squats down on the edge of the skateboarding bowl, facing to the right. Subsequently, he glides into the interior of the skateboarding bowl. The interior of the bowl is dimly lit. 
The camera follows the male's movements by rotating to the right.", "visual_elements": [ { "content": "The camera pans slightly upwards.", "type": "camera", "weight": 3 }, { "content": "The man bends down to speak towards the camera.", "type": "action", "weight": 3 }, { "content": "The man is wearing a black helmet.", "type": "attribute", "weight": 3 }, { "content": "The man squats down on the edge of the skateboarding bowl, facing to the right.", "type": "action", "weight": 2 }, { "content": "The man glides into the interior of the skateboarding bowl.", "type": "action", "weight": 3 }, { "content": "The interior of the bowl is dimly lit.", "type": "attribute", "weight": 2 }, { "content": "The camera follows the male's movements by rotating to the right.", "type": "camera", "weight": 3 } ] }, { "event": "The camera first zooms out to give a wider view and then zooms in as the male skates out of the skateboarding bowl. When he reaches the edge, he uses his hands to push against the ground and sits down on the edge. Subsequently, he twists his upper body backwards while lifting his legs up with force. Finally, his left foot lands first, followed closely by his right foot. The male bends forward and uses his right hand to support himself on the ground to maintain balance.", "visual_elements": [ { "content": "The camera first zooms out to give a wider view and then zooms in.", "type": "camera", "weight": 3 }, { "content": "The man skates out of the skateboarding bowl.", "type": "action", "weight": 3 }, { "content": "When the man reaches the edge, he uses his hands to push against the ground and sits down on the edge.", "type": "action", "weight": 3 }, { "content": "Subsequently, the man twists his upper body backwards while lifting his legs up with force.", "type": "action", "weight": 3 }, { "content": "The man's left foot lands first, followed closely by his right foot.", "type": "action", "weight": 3 }, { "content": "The male bends forward and uses his right hand to support himself on the ground to maintain balance.", "type": "attribute", "weight": 3 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0750", "video_path": "VIDGEN-1M/AuVYp3frf5k-Scene-0039.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 4.88, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, two male athletes dressed in judo uniforms are engaged in a match at the center of the competition arena. The athlete facing away from the camera wears a white judo uniform, while his opponent wears a blue judo uniform. In the background, the audience can be seen, seated in red chairs, focusing intently on the match.\nThe camera pans to the right. The athlete in the white judo uniform uses his left hand to grab the back collar of the blue judo uniform athlete, while his right hand controls the opponent's sleeve. The athlete in the blue judo uniform responds by tightly grasping the front collar of the white judo uniform athlete with both hands, creating a stalemate between the two. As the match progresses, the blue judo uniform athlete suddenly exerts force, breaking free from the white judo uniform athlete's hold. After a brief separation, the two athletes once again lunge towards each other. At this moment, a referee dressed in a black suit enters the frame from the right side of the screen.\nThe camera pans to the left, capturing the white judo uniform athlete using his upper body to pin down the upper body of his opponent. 
With a powerful thrust from his left leg, the white judo uniform athlete trips the blue judo uniform athlete to the ground. The white judo uniform athlete then presses his body weight onto the blue judo uniform athlete, forcing him to land on his back. The audience in the stands around the competition arena rises to their feet and applauds. The advertising boards in the background featuring the logos and text such as \"GCBH\" and \"JUDO\" serve as a testament to the commercial and organizational aspects of the judo competition.", "events": [ { "event": "At the beginning of the video, two male athletes dressed in judo uniforms are engaged in a match at the center of the competition arena. The athlete facing away from the camera wears a white judo uniform, while his opponent wears a blue judo uniform. In the background, the audience can be seen, seated in red chairs, focusing intently on the match.", "visual_elements": [ { "content": "Two male athletes dressed in judo uniforms are engaged in a match at the center of the competition arena.", "type": "attribute", "weight": 3 }, { "content": "The athlete facing away from the camera wears a white judo uniform.", "type": "attribute", "weight": 3 }, { "content": "The athlete facing the one in the white judo uniform is wearing a blue judo uniform.", "type": "attribute", "weight": 3 }, { "content": "In the background, the audience can be seen.", "type": "scene", "weight": 2 }, { "content": "The audience, seated in red chairs, focuses intently on the match.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans to the right. The athlete in the white judo uniform uses his left hand to grab the back collar of the blue judo uniform athlete, while his right hand controls the opponent's sleeve. The athlete in the blue judo uniform responds by tightly grasping the front collar of the white judo uniform athlete with both hands, creating a stalemate between the two. As the match progresses, the blue judo uniform athlete suddenly exerts force, breaking free from the white judo uniform athlete's hold. After a brief separation, the two athletes once again lunge towards each other. At this moment, a referee dressed in a black suit enters the frame from the right side of the screen.", "visual_elements": [ { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "The athlete in the white judo uniform uses his left hand to grab the back collar of the blue judo uniform athlete, while his right hand controls the opponent's sleeve.", "type": "action", "weight": 3 }, { "content": "The athlete in the blue judo uniform responds by tightly grasping the front collar of the white judo uniform athlete with both hands.", "type": "action", "weight": 3 }, { "content": "The two athletes restrain each other in a stalemate.", "type": "action", "weight": 2 }, { "content": "As the match progresses, the blue judo uniform athlete suddenly exerts force, breaking free from the white judo uniform athlete's hold.", "type": "action", "weight": 3 }, { "content": "The two athletes are momentarily separated from each other.", "type": "action", "weight": 3 }, { "content": "The two athletes once again lunge towards each other.", "type": "action", "weight": 3 }, { "content": "A referee dressed in a black suit enters the frame from the right side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "The camera pans to the left, capturing the white judo uniform athlete using his upper body to pin down the upper body of his opponent. 
With a powerful thrust from his left leg, the white judo uniform athlete trips the blue judo uniform athlete to the ground. The white judo uniform athlete then presses his body weight onto the blue judo uniform athlete, forcing him to land on his back.The audience in the stands around the competition arena rises to their feet and applauds. The advertising boards in the background featuring the logos and text such as \"GCBH\" and \"JUDO\" serve as a testament to the commercial and organizational aspects of the judo competition.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The white judo uniform athlete using his upper body to pin down the upper body of his opponent.", "type": "action", "weight": 3 }, { "content": "With a powerful thrust from his left leg, the white judo uniform athlete trips the blue judo uniform athlete to the ground.", "type": "action", "weight": 3 }, { "content": "The white judo uniform athlete then presses his body weight onto the blue judo uniform athlete, forcing him to land on his back.", "type": "action", "weight": 3 }, { "content": "The audience in the stands around the competition arena rises to their feet and applauds.", "type": "scene", "weight": 2 }, { "content": "The advertising boards in the background featuring the logos and text such as \"GCBH\" and \"JUDO\" serve as a testament to the commercial and organizational aspects of the judo competition.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0751", "video_path": "VIDGEN-1M/BL0M9VChVJU-Scene-0048.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 11.34, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a chocolate chip cookie placed in an aluminum foil tray. The close-up shot of the camera shows the chocolate chips evenly distributed on the surface of the cookie. The background is blurry. A silver knife is placed on the upper right of the chocolate chip cookie.\nThe camera zooms in. The knife starts to gently cut into the surface of the cookie, and a shallow cut mark appears on the surface of the cookie. The camera moves slightly to the left following the movement of the knife, and the knife makes another obvious cut on the surface of the cookie. The camera continues to zoom in.", "events": [ { "event": "At the beginning of the video, the camera is focused on a chocolate chip cookie placed in an aluminum foil tray. The close-up shot of the camera shows the chocolate chips evenly distributed on the surface of the cookie. The background is blurry. A silver knife is placed on the upper right of the chocolate chip cookie.", "visual_elements": [ { "content": "The camera is focused on a chocolate chip cookie placed in an aluminum foil tray.", "type": "camera", "weight": 3 }, { "content": "The close-up shot of the camera shows the chocolate chips evenly distributed on the surface of the cookie.", "type": "camera", "weight": 3 }, { "content": "The background is blurry.", "type": "scene", "weight": 1 }, { "content": "A silver knife is placed on the upper right of the chocolate chip cookie.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera zooms in. The knife starts to gently cut into the surface of the cookie, and a shallow cut mark appears on the surface of the cookie. 
The camera moves slightly to the left following the movement of the knife, and the knife makes another obvious cut on the surface of the cookie. The camera continues to zoom in.", "visual_elements": [ { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The knife starts to gently cut into the surface of the cookie.", "type": "action", "weight": 3 }, { "content": "A shallow cut mark appears on the surface of the cookie.", "type": "attribute", "weight": 2 }, { "content": "The camera moves slightly to the left following the movement of the knife.", "type": "camera", "weight": 3 }, { "content": "The knife makes another obvious cut on the surface of the cookie.", "type": "action", "weight": 3 }, { "content": "The camera continues to zoom in.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0752", "video_path": "VIDGEN-1M/BLEwIq4CYHk-Scene-0003.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 19.96, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on two male individuals standing on a boxing platform, with the audience seats and bright lights visible in the background. The bald male in front is wearing a white short-sleeved shirt, with a black bow tie tied at the neck. Behind and to the right of the bald male, another male is dressed in a white suit.\nThe camera then cuts to a wider angle, showcasing the entire boxing arena. Surrounding the arena are rows of spectators, who are eagerly anticipating the event. Above the boxing platform, a massive circular lighting fixture illuminates the entire area.There are a few people moving around on the boxing platform.\nAs the camera zooms in, a boxer wearing red shorts at the bottom right corner of the boxing platform. He engages in a fist bump with the audience below, followed by an emotional embrace and a cheek kiss goodbye with his coach. The coach then exits from the right side of the boxing platform.Meanwhile, another boxer dressed in blue shorts stands diagonally opposite, also bidding farewell to his coach with a hug. In the center of the boxing platform stands the referee, dressed in a white short-sleeved shirt, ready to oversee the proceedings.\nThe camera zooms in and rotates to the left, revealing subtitles in the lower middle part of the scene that display the information of the two boxers participating in the match. The coach of the boxer in blue shorts, along with other unrelated personnel, exit the boxing platform. Only the two boxers and the referee remain on the platform. The two boxers stand opposite each other, both constantly bouncing on their feet to warm up.", "events": [ { "event": "At the beginning of the video, the camera focuses on two male individuals standing on a boxing platform, with the audience seats and bright lights visible in the background. The bald male in front is wearing a white short-sleeved shirt, with a black bow tie tied at the neck. 
Behind and to the right of the bald male, another male is dressed in a white suit.", "visual_elements": [ { "content": "The camera focuses on two male individuals standing on a boxing platform.", "type": "camera", "weight": 3 }, { "content": "The audience seats and bright lights are visible in the background.", "type": "scene", "weight": 2 }, { "content": "The bald male in front is wearing a white short-sleeved shirt.", "type": "attribute", "weight": 3 }, { "content": "The male in front has a black bow tie tied at the neck.", "type": "attribute", "weight": 2 }, { "content": "Behind and to the right of the bald male, another male is dressed in a white suit.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera then cuts to a wider angle, showcasing the entire boxing arena. Surrounding the arena are rows of spectators, who are eagerly anticipating the event. Above the boxing platform, a massive circular lighting fixture illuminates the entire area. There are a few people moving around on the boxing platform.", "visual_elements": [ { "content": "The camera then cuts to a wider angle, showcasing the entire boxing arena.", "type": "camera", "weight": 3 }, { "content": "Surrounding the arena are rows of spectators.", "type": "scene", "weight": 2 }, { "content": "Above the boxing platform, a massive circular lighting fixture illuminates the entire area.", "type": "scene", "weight": 2 }, { "content": "There are a few people moving around on the boxing platform.", "type": "attribute", "weight": 3 } ] }, { "event": "As the camera zooms in, a boxer wearing red shorts stands at the bottom right corner of the boxing platform. He engages in a fist bump with the audience below, followed by an emotional embrace and a cheek kiss goodbye with his coach. The coach then exits from the right side of the boxing platform. Meanwhile, another boxer dressed in blue shorts stands diagonally opposite, also bidding farewell to his coach with a hug. In the center of the boxing platform stands the referee, dressed in a white short-sleeved shirt, ready to oversee the proceedings.", "visual_elements": [ { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "A boxer wearing red shorts stands at the bottom right corner of the boxing platform.", "type": "attribute", "weight": 3 }, { "content": "A boxer wearing red shorts engages in a fist bump with the audience below.", "type": "action", "weight": 3 }, { "content": "The boxer in red shorts embraces his coach and bids farewell with a kiss on the cheek.", "type": "action", "weight": 3 }, { "content": "The coach then exits from the right side of the boxing platform.", "type": "action", "weight": 3 }, { "content": "Another boxer dressed in blue shorts stands diagonally opposite, also bidding farewell to his coach with a hug.", "type": "action", "weight": 3 }, { "content": "In the center of the boxing platform stands the referee, dressed in a white short-sleeved shirt.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera zooms in and rotates to the left, revealing subtitles in the lower middle part of the scene that display the information of the two boxers participating in the match. The coach of the boxer in blue shorts, along with other unrelated personnel, exit the boxing platform. Only the two boxers and the referee remain on the platform. 
The two boxers stand opposite each other, both constantly bouncing on their feet to warm up.", "visual_elements": [ { "content": "The camera zooms in and rotates to the left.", "type": "camera", "weight": 3 }, { "content": "Subtitles appear in the lower middle part of the screen, displaying the information of the two contestants in the match.", "type": "scene", "weight": 1 }, { "content": "The coach of the boxer in blue shorts, along with other unrelated personnel, exit the boxing platform.", "type": "action", "weight": 2 }, { "content": "Only the two boxers and the referee remain on the platform.", "type": "attribute", "weight": 3 }, { "content": "The two boxers stand opposite each other, both constantly bouncing on their feet to warm up.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0753", "video_path": "VIDGEN-1M/BQLy0btTRe0-Scene-0044.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 18.53, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a black-framed aquarium with several fish and some red coral reefs. The bottom of the aquarium is covered with white sand. The fish are of different colors and types. The background is blue water. Outside the aquarium is a little boy in black clothes, bending over, holding a black net bag with a yellow handle in his right hand, and stretching his arm above the aquarium.\nThe camera moves downwards, and the black frame of the aquarium disappears at the top of the screen. A black net moves in the water, catching a black fish with white spots. The fish swims quickly around the coral reef, and the net tries to catch it several times but fails. A corner of another aquarium appears on the right side of the screen.\nFinally, the camera moves upwards with the net bag. The net bag successfully catches the fish and pulls it out of the water. The black frame of the aquarium appears again.", "events": [ { "event": "At the beginning of the video, the camera is focused on a black-framed aquarium with several fish and some red coral reefs. The bottom of the aquarium is covered with white sand. The fish are of different colors and types. The background is blue water. Outside the aquarium is a little boy in black clothes, bending over, holding a black net bag with a yellow handle in his right hand, and stretching his arm above the aquarium.", "visual_elements": [ { "content": "The camera is pointed at a black-framed aquarium.", "type": "camera", "weight": 3 }, { "content": "There are several fish and some red coral reefs in the aquarium.", "type": "attribute", "weight": 3 }, { "content": "The bottom of the aquarium is covered with white sand.", "type": "attribute", "weight": 1 }, { "content": "The fish are of different colors and types.", "type": "attribute", "weight": 2 }, { "content": "The background is blue water.", "type": "scene", "weight": 3 }, { "content": "There is a little boy wearing black clothes outside the aquarium.", "type": "attribute", "weight": 3 }, { "content": "The boy holds a black net bag with a yellow handle in his right hand.", "type": "attribute", "weight": 2 }, { "content": "The boy extends his right arm above the aquarium.", "type": "action", "weight": 3 } ] }, { "event": "The camera moves downwards, and the black frame of the aquarium disappears at the top of the screen. A black net moves in the water, catching a black fish with white spots. 
The fish swims quickly around the coral reef, and the net tries to catch it several times but fails. A corner of another aquarium appears on the right side of the screen.", "visual_elements": [ { "content": "The camera moves downward.", "type": "camera", "weight": 3 }, { "content": "The black frame of the aquarium disappears at the top of the screen.", "type": "attribute", "weight": 1 }, { "content": "The black net bag moves in the water to catch a black fish with white spots.", "type": "action", "weight": 3 }, { "content": "The fish swims quickly around the coral reefs, and the net bag tries to catch it several times but fails.", "type": "action", "weight": 2 }, { "content": "A corner of another aquarium appears on the right side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera moves upwards with the net bag. The net bag successfully catches the fish and pulls it out of the water. The black frame of the aquarium appears again.", "visual_elements": [ { "content": "Finally, the camera moves upward with the net bag.", "type": "camera", "weight": 3 }, { "content": "The net bag successfully catches the fish and pulls it out of the water.", "type": "action", "weight": 3 }, { "content": "The black frame of the aquarium appears again.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0754", "video_path": "VIDGEN-1M/Bxsns_UZ-zc-Scene-0006.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 11.58, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera looks down at the entire baseball field. There are several players wearing yellow shirts and white pants on the field. The pitcher stands on the pitcher's mound and prepares to pitch. The score and speed information of the game are displayed in the lower right corner of the screen.\nThe camera moves forward and quickly focuses on the baseball player No. 48 in a yellow jersey wearing a green and yellow-brimmed baseball cap. He rolls on the pitcher's mound for a few rounds and then quickly stands up and starts pitching. The pitcher No. 48 throws a 91-mile-per-hour fastball, which flies over the green grass to the right field of vision. The camera also moves with the ball. In the picture, you can see the infielder who is also wearing a yellow shirt and white pants and a green baseball cap standing in the red zone. He lowers his center of gravity and arches his left leg. After catching the ball, he stands up and walks to his left. In front of the infielder to the right is a referee wearing a black shirt and a black cap. At the same time, when the infielder catches the ball, there are two players wearing gray jerseys and black baseball caps behind him. One player runs to the right along the white line of the red zone, and the other stands on the green grass with his hands on his waist. There are various billboards and red and green game areas in the background.\nThe camera switches to another angle to show the pitching and catching process. The player wearing a yellow jersey, white pants and a green baseball cap is running while leaning to the left to catch the ball. The camera quickly zooms in. The number 48 is written on the player's jersey. After catching the ball, the pitcher No. 48 lies prone in the red zone, rolls, and then stands up. The infielder No. 17 in the distance successfully catches the ball. 
Behind the infielder is a player wearing a gray jersey running and another player in a gray jersey standing on the green grass with his hands on his waist.", "events": [ { "event": "At the beginning of the video, the camera looks down at the entire baseball field. There are several players wearing yellow shirts and white pants on the field. The pitcher stands on the pitcher's mound and prepares to pitch. The score and speed information of the game are displayed in the lower right corner of the screen.", "visual_elements": [ { "content": "The camera looks down at the entire baseball field.", "type": "camera", "weight": 3 }, { "content": "There are several players wearing yellow shirts and white pants on the field.", "type": "attribute", "weight": 2 }, { "content": "The pitcher stands on the pitcher's mound ready to pitch.", "type": "action", "weight": 3 }, { "content": "The score and speed information of the game are displayed in the lower right corner of the screen.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves forward and quickly focuses on the baseball player No. 48 in a yellow jersey wearing a green and yellow-brimmed baseball cap. He rolls on the pitcher's mound for a few rounds and then quickly stands up and starts pitching. The pitcher No. 48 throws a 91-mile-per-hour fastball, which flies over the green grass to the right field of vision. The camera also moves with the ball. In the picture, you can see the infielder who is also wearing a yellow shirt and white pants and a green baseball cap standing in the red zone. He lowers his center of gravity and arches his left leg. After catching the ball, he stands up and walks to his left. In front of the infielder to the right is a referee wearing a black shirt and a black cap. At the same time, when the infielder catches the ball, there are two players wearing gray jerseys and black baseball caps behind him. One player runs to the right along the white line of the red zone, and the other stands on the green grass with his hands on his waist. There are various billboards and red and green game areas in the background.", "visual_elements": [ { "content": "The camera moves forward and quickly focuses on the baseball player wearing a yellow jersey with the number 48 and a green baseball cap with a yellow brim.", "type": "camera", "weight": 3 }, { "content": "Pitcher No. 48 rolls on the pitcher's mound for a few rounds and then quickly stands up and starts pitching.", "type": "action", "weight": 3 }, { "content": "Pitcher No. 
48 throws a 91 mph fastball.", "type": "attribute", "weight": 2 }, { "content": "The ball flies over the green grass to the right side of the field of vision.", "type": "action", "weight": 3 }, { "content": "The camera also moves with the ball.", "type": "camera", "weight": 3 }, { "content": "In the picture, the infielder, who is also wearing a yellow shirt, white pants and a green baseball cap, stands in the red zone.", "type": "attribute", "weight": 3 }, { "content": "The infielder lowers his center of gravity and arches his left leg.", "type": "action", "weight": 3 }, { "content": "After catching the ball, the infielder stands up and walks to his left.", "type": "action", "weight": 2 }, { "content": "In front of and to the right of the infielder is a referee wearing a black shirt and a black baseball cap.", "type": "attribute", "weight": 2 }, { "content": "When the infielder catches the ball, there are two players wearing gray jerseys and black baseball caps behind him.", "type": "attribute", "weight": 2 }, { "content": "One player runs to the right along the white line in the red zone.", "type": "action", "weight": 2 }, { "content": "Another player stands on the green grass with his hands on his waist.", "type": "attribute", "weight": 2 }, { "content": "There are various billboards and the red and green playing area in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to another angle to show the pitching and catching process. The player wearing a yellow jersey, white pants and a green baseball cap is running while leaning to the left to catch the ball. The camera quickly zooms in. The number 48 is written on the player's jersey. After catching the ball, the pitcher No. 48 lies prone in the red zone, rolls, and then stands up. The infielder No. 17 in the distance successfully catches the ball. Behind the infielder is a player wearing a gray jersey running and another player in a gray jersey standing on the green grass with his hands on his waist.", "visual_elements": [ { "content": "The camera switches to another angle to show the pitching and catching process on the screen.", "type": "camera", "weight": 3 }, { "content": "The player wearing a yellow jersey, white pants and a green baseball cap runs while leaning to the left to catch the ball.", "type": "action", "weight": 3 }, { "content": "The camera quickly zooms in.", "type": "camera", "weight": 3 }, { "content": "The number 48 is written on the player's jersey.", "type": "attribute", "weight": 3 }, { "content": "After catching the ball, the pitcher No. 48 rolls over in the red zone and stands up.", "type": "action", "weight": 3 }, { "content": "The infielder No. 17 in the distance successfully catches the ball.", "type": "action", "weight": 3 }, { "content": "Behind the infielder, there is a player wearing a gray jersey running and another player wearing a gray jersey standing on the green grass with his hands on his waist.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 24 }, { "index": "TUNA_0755", "video_path": "VIDGEN-1M/CSrqCUdRKjg-Scene-0182.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 12.24, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a medium-sized dog. The dog's fur is mottled, mainly a mixture of brown and white, with black spots on its body. The dog lowers its head and sniffs the ground. 
The background is a dry meadow, covered with golden hay and interspersed with some green plants. The sun is bright, and the light shines through the gaps in the plants onto the ground and the dog's body, forming mottled light and shadows.\nThen, the camera moves slightly to the right with the dog. The dog finds a piece of meat and picks it up and chews it facing the camera. There is a shadow of the plant on the dog's face.\nFinally, the camera moves slightly downward. The dog finishes chewing the meat, lowers its head and walks to the right of the screen, sniffing the ground. The shadow of the dog on the ground gradually grows larger.", "events": [ { "event": "At the beginning of the video, the camera focuses on a medium-sized dog. The dog's fur is mottled, mainly a mixture of brown and white, with black spots on its body. The dog lowers its head and sniffs the ground. The background is a dry meadow, covered with golden hay and interspersed with some green plants. The sun is bright, and the light shines through the gaps in the plants onto the ground and the dog's body, forming mottled light and shadows.", "visual_elements": [ { "content": "The camera focuses on a medium-sized dog.", "type": "camera", "weight": 3 }, { "content": "The dog's fur is mottled, mainly a mixture of brown and white.", "type": "attribute", "weight": 3 }, { "content": "There are black spots on the dog's body.", "type": "attribute", "weight": 1 }, { "content": "The dog lowers its head and sniffs around on the ground.", "type": "action", "weight": 3 }, { "content": "The background is a dry meadow.", "type": "scene", "weight": 3 }, { "content": "The ground is covered with golden hay.", "type": "scene", "weight": 2 }, { "content": "There are some green plants interspersed in the hay.", "type": "scene", "weight": 1 }, { "content": "The light shines through the gaps in the plants onto the ground and the dog's body, forming mottled light and shadow.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves slightly to the right with the dog. The dog finds a piece of meat and picks it up and chews it facing the camera. There is a shadow of the plant on the dog's face.", "visual_elements": [ { "content": "Then, the camera moves slightly to the right with the dog.", "type": "camera", "weight": 3 }, { "content": "The dog finds a piece of meat and picks it up and chews it facing the camera.", "type": "action", "weight": 3 }, { "content": "The shadow of the plant is on the dog's face.", "type": "scene", "weight": 2 } ] }, { "event": "Finally, the camera moves slightly downward. The dog finishes chewing the meat, lowers its head and walks to the right of the screen, sniffing the ground. The shadow of the dog on the ground gradually grows larger.", "visual_elements": [ { "content": "Finally, the camera moves slightly downward.", "type": "camera", "weight": 3 }, { "content": "After chewing the meat, the dog lowers its head and walks to the right of the screen and sniffs around on the ground.", "type": "action", "weight": 3 }, { "content": "The shadow of the dog on the ground gradually grows.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0756", "video_path": "VIDGEN-1M/Cv64E2PfmD0-Scene-0129.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 17.38, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focuses on an indoor equestrian training arena. 
The space is wide and the ground is covered with sand, surrounded by wooden fences and supporting structures. Some light filters through the ceiling, illuminating the entire area. A rider dressed in a gray top and black vest is seated on the back of a brown horse. The rider wears a black helmet, black riding pants, and boots. The horse's saddle pad is purple.\nThe camera zooms in as the rider, mounted on the horse, slowly proceeds along the left fence of the arena. As the horse walks, some dust rises from the ground. The rider maintains a steady posture, gripping the reins with both hands and controlling the direction of the horse.\nThe rider and the horse make two right turns to reach the right side of the arena, with the camera zooming in and panning to the right. On the right side of the arena, there are some obstacles and jumps. The rider guides the horse over a low jump. The camera stays focused on the rider and the horse as they navigate the jump and proceed into the next right turn.", "events": [ { "event": "At the beginning of the video, the camera focuses on an indoor equestrian training arena. The space is wide and the ground is covered with sand, surrounded by wooden fences and supporting structures. Some light filters through the ceiling, illuminating the entire area. A rider dressed in a gray top and black vest is seated on the back of a brown horse. The rider wears a black helmet, black riding pants, and boots. The horse's saddle pad is purple.", "visual_elements": [ { "content": "The camera focuses on an indoor equestrian training arena.", "type": "camera", "weight": 3 }, { "content": "The space is wide and the ground is covered with sand.", "type": "attribute", "weight": 2 }, { "content": "The surroundings are enclosed by wooden fences and supporting structures.", "type": "scene", "weight": 2 }, { "content": "Some light filters through the ceiling, illuminating the entire area.", "type": "scene", "weight": 2 }, { "content": "A rider dressed in a gray top and black vest is seated on the back of a brown horse.", "type": "attribute", "weight": 3 }, { "content": "The rider wears a black helmet, black riding pants, and boots.", "type": "attribute", "weight": 3 }, { "content": "The horse's saddle pad is purple.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera zooms in as the rider, mounted on the horse, slowly proceeds along the left fence of the arena. As the horse walks, some dust rises from the ground. The rider maintains a steady posture, gripping the reins with both hands and controlling the direction of the horse.", "visual_elements": [ { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The rider, mounted on the horse, slowly proceeds along the left fence of the arena.", "type": "action", "weight": 3 }, { "content": "As the horse walks, some dust rises from the ground.", "type": "attribute", "weight": 2 }, { "content": "The rider maintains a steady posture, gripping the reins with both hands and controlling the direction of the horse.", "type": "attribute", "weight": 3 } ] }, { "event": "The rider and the horse make two right turns to reach the right side of the arena, with the camera zooming in and panning to the right. On the right side of the arena, there are some obstacles and jumps. The rider guides the horse over a low jump. 
The camera stays focused on the rider and the horse as they navigate the jump and proceed into the next right turn.", "visual_elements": [ { "content": "The rider and the horse make two right turns to reach the right side of the arena.", "type": "action", "weight": 3 }, { "content": "The camera zooms in and pans to the right.", "type": "camera", "weight": 3 }, { "content": "On the right side of the arena, there are some obstacles and jumps.", "type": "attribute", "weight": 2 }, { "content": "The rider guides the horse over a low jump.", "type": "action", "weight": 3 }, { "content": "The camera stays focused on the rider and the horse as they navigate the jump and proceed into the next right turn.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0757", "video_path": "VIDGEN-1M/Dc2loz8nySQ-Scene-0007.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 6.21, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a male wearing a black T-shirt and black pants is riding an orange BMX bicycle. In front of him is a flower bed with a slope, planted with green plants and red flowers. Part of the red signboard of a shop behind the flower bed appears on the left side of the scene. Many vehicles are parked on both sides of the road in the background.\nAt the start of the video, the camera pans to the left as the man rides his bicycle up the slope of the flower bed. The front wheel of the bicycle gets stuck on the edge of the flower bed, and the man gets off the bicycle. He moves his right leg outward.\nThe camera pans to the right as the man, with both hands on the handlebars, moves the bicycle to the flat ground in front of the flower bed. He jumps down directly from the flower bed, landing his left foot on the ground and his right foot on the right pedal of the bicycle. The video pauses as the man prepares to continue riding his bicycle towards the right rear of the scene.", "events": [ { "event": "At the beginning of the video, a male wearing a black T-shirt and black pants is riding an orange BMX bicycle. In front of him is a flower bed with a slope, planted with green plants and red flowers. Part of the red signboard of a shop behind the flower bed appears on the left side of the scene. Many vehicles are parked on both sides of the road in the background.", "visual_elements": [ { "content": "A male wearing a black T-shirt and black pants is riding an orange BMX bicycle.", "type": "attribute", "weight": 3 }, { "content": "In front of the man is a flower bed with a slope.", "type": "attribute", "weight": 2 }, { "content": "There are green plants and red flowers in the flower bed.", "type": "scene", "weight": 2 }, { "content": "Part of the red signboard of a shop behind the flower bed appears on the left side of the scene.", "type": "scene", "weight": 1 }, { "content": "Many vehicles are parked on both sides of the road in the background.", "type": "scene", "weight": 2 } ] }, { "event": "At the start of the video, the camera pans to the left as the man rides his bicycle up the slope of the flower bed. The front wheel of the bicycle gets stuck on the edge of the flower bed, and the man gets off the bicycle. 
He moves his right leg outward.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The man rides his bicycle up the slope of the flower bed.", "type": "action", "weight": 3 }, { "content": "The front wheel of the bicycle gets stuck on the edge of the flower bed.", "type": "attribute", "weight": 3 }, { "content": "The man gets off the bicycle.", "type": "action", "weight": 3 }, { "content": "The man moves his right leg outward.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans to the right as the man, with both hands on the handlebars, moves the bicycle to the flat ground in front of the flower bed. He jumps down directly from the flower bed, landing his left foot on the ground and his right foot on the right pedal of the bicycle. The video pauses as the man prepares to continue riding his bicycle towards the right rear of the scene.", "visual_elements": [ { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "The man, with both hands on the handlebars, moves the bicycle to the flat ground in front of the flower bed.", "type": "action", "weight": 3 }, { "content": "The man jumps down directly from the flower bed.", "type": "action", "weight": 3 }, { "content": "The man lands with his left foot on the ground and his right foot on the right pedal of the bicycle.", "type": "action", "weight": 3 }, { "content": "The video pauses as the man prepares to continue riding his bicycle towards the right rear of the scene.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0758", "video_path": "VIDGEN-1M/EFRCkIZBxPU-Scene-0063.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 9.14, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an indoor venue. A pit near the camera is filled with blue foam blocks. The left wall of the venue is painted in gray and white. On the right side of the venue, there is a platform where a man sits on a bicycle. The man wears a dark-colored T-shirt and jeans, with red sneakers on his feet. He plays with a blue foam block in his hand. In the background, there is a slope with several people on it.\nThe camera shakes left and right. A person rides a bicycle down the slope, quickly approaching the foam block area. This person wears a light-colored T-shirt and jeans. Above the foam block area, the person performs some spinning tricks on the bicycle. At this moment, the man on the platform on the right side of the scene throws the blue foam block he is holding towards the person performing aerial tricks on the bicycle. Several white lights on the ceiling illuminate the entire venue.", "events": [ { "event": "At the beginning of the video, the camera focuses on an indoor venue. A pit near the camera is filled with blue foam blocks. The left wall of the venue is painted in gray and white. On the right side of the venue, there is a platform where a man sits on a bicycle. The man wears a dark-colored T-shirt and jeans, with red sneakers on his feet. He plays with a blue foam block in his hand. 
In the background, there is a slope with several people on it.", "visual_elements": [ { "content": "The camera focuses on an indoor venue.", "type": "camera", "weight": 3 }, { "content": "A pit near the camera is filled with blue foam blocks.", "type": "attribute", "weight": 2 }, { "content": "The left wall of the venue is painted in gray and white.", "type": "scene", "weight": 1 }, { "content": "On the right side of the venue, there is a platform.", "type": "scene", "weight": 2 }, { "content": "A man sits on a bicycle on the platform.", "type": "attribute", "weight": 3 }, { "content": "The man wears a dark-colored T-shirt and jeans, with red sneakers on his feet.", "type": "attribute", "weight": 2 }, { "content": "He plays with a blue foam block in his hand.", "type": "attribute", "weight": 2 }, { "content": "In the background, there is a slope.", "type": "scene", "weight": 2 }, { "content": "Several people are on the slope.", "type": "scene", "weight": 1 } ] }, { "event": "The camera shakes left and right. A person rides a bicycle down the slope, quickly approaching the foam block area. This person wears a light-colored T-shirt and jeans. Above the foam block area, the person performs some spinning tricks on the bicycle. At this moment, the man on the platform on the right side of the scene throws the blue foam block he is holding towards the person performing aerial tricks on the bicycle. Several white lights on the ceiling illuminate the entire venue.", "visual_elements": [ { "content": "The camera shakes left and right.", "type": "camera", "weight": 3 }, { "content": "A person rides a bicycle down the slope, quickly approaching the foam block area.", "type": "action", "weight": 3 }, { "content": "This person wears a light-colored T-shirt and jeans.", "type": "attribute", "weight": 2 }, { "content": "Above the foam block area, the person performs some spinning tricks on the bicycle.", "type": "action", "weight": 3 }, { "content": "The man on the platform on the right side of the scene throws the blue foam block towards the person performing aerial tricks.", "type": "action", "weight": 3 }, { "content": "Several white lights on the ceiling illuminate the entire venue.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0759", "video_path": "VIDGEN-1M/F0SkjoKuF6Y-Scene-0047.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 7.91, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera pointed at a white dog from the side, facing the left side of the frame. The dog has black spots on its back and tail. The dog is sitting on a dirt floor with pebbles and sand on the ground. The dog's hind legs are naturally bent, the front legs are straight and slightly tucked, and the tail is gently curled behind it.\nThe camera moves to the upper left and the dog's head enters the frame. The dog's head is in black and white. The land in the background becomes larger and the land at the top of the frame is covered by shadows.\nFinally, the camera moves down slightly. The dog's ears begin to twitch slightly.", "events": [ { "event": "The video begins with the camera pointed at a white dog from the side, facing the left side of the frame. The dog has black spots on its back and tail. The dog is sitting on a dirt floor with pebbles and sand on the ground. 
The dog's hind legs are naturally bent, the front legs are straight and slightly tucked, and the tail is gently curled behind it.", "visual_elements": [ { "content": "The camera is pointed at a white dog from the side.", "type": "camera", "weight": 3 }, { "content": "The dog is facing the left side of the frame.", "type": "attribute", "weight": 3 }, { "content": "The dog has black spots on its back and tail.", "type": "attribute", "weight": 2 }, { "content": "The dog is sitting on a patch of dirt.", "type": "action", "weight": 3 }, { "content": "The dog's hind legs are naturally bent, the front legs are straight and slightly tucked in, and the tail is gently curled behind it.", "type": "action", "weight": 2 }, { "content": "There are small stones and sand on the ground.", "type": "scene", "weight": 3 } ] }, { "event": "The camera moves to the upper left and the dog's head enters the frame. The dog's head is in black and white. The land in the background becomes larger and the land at the top of the frame is covered by shadows.", "visual_elements": [ { "content": "The camera moves to the upper left.", "type": "camera", "weight": 3 }, { "content": "The dog's black and white head enters the frame.", "type": "attribute", "weight": 3 }, { "content": "The land in the background becomes larger.", "type": "scene", "weight": 2 }, { "content": "The land at the top of the frame is covered by shadows.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera moves down slightly. The dog's ears begin to twitch slightly.", "visual_elements": [ { "content": "Finally, the camera moves slightly downward.", "type": "camera", "weight": 3 }, { "content": "The dog's ears begin to tremble slightly.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0760", "video_path": "VIDGEN-1M/F1NeoEZwlMo-Scene-0016.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 6.33, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on the upper body of a woman. The woman is wearing a red and white T-shirt with the word \"GREMLINS\" and a cartoon image printed on it. The woman's hair is black and draped on both sides of her shoulders. The woman wears a white watch on her left hand and a ring on her left ring finger. The woman's nails are painted with pink nail polish. The background is a room. There are several decorative paintings hanging on the wall on the left side of the picture. Under the decorative paintings is a dressing table with some items placed on it. The door of the room is white. There is also a TV on the right side of the picture. Under the TV is a white cabinet with some sundries in the cabinet.\nThe camera moves down slightly and focuses on the center of her T-shirt. The woman points with her left index finger to a small black animal with wings on the T-shirt. Below the small animal is a brown animal with big eyes and a pair of big ears facing forward.", "events": [ { "event": "At the beginning of the video, the camera is focused on the upper body of a woman. The woman is wearing a red and white T-shirt with the word \"GREMLINS\" and a cartoon image printed on it. The woman's hair is black and draped on both sides of her shoulders. The woman wears a white watch on her left hand and a ring on her left ring finger. The woman's nails are painted with pink nail polish. The background is a room. 
There are several decorative paintings hanging on the wall on the left side of the picture. Under the decorative paintings is a dressing table with some items placed on it. The door of the room is white. There is also a TV on the right side of the picture. Under the TV is a white cabinet with some sundries in the cabinet.", "visual_elements": [ { "content": "The camera is focused on a woman's upper body.", "type": "camera", "weight": 3 }, { "content": "The woman is wearing a red and white T-shirt.", "type": "attribute", "weight": 2 }, { "content": "The T-shirt has the words \"GREMLINS\" and a cartoon image printed on it.", "type": "attribute", "weight": 1 }, { "content": "The woman's hair is black and falls on both sides of her shoulders.", "type": "attribute", "weight": 1 }, { "content": "The woman wears a white watch on her left hand and a ring on her left ring finger.", "type": "attribute", "weight": 2 }, { "content": "The woman's nails are painted with pink nail polish.", "type": "attribute", "weight": 1 }, { "content": "There are several decorative paintings hanging on the wall on the left side of the picture.", "type": "scene", "weight": 1 }, { "content": "Under the decorative paintings is a dressing table with some items on it.", "type": "scene", "weight": 1 }, { "content": "The door of the room is white.", "type": "scene", "weight": 1 }, { "content": "There is also a TV on the right side of the picture.", "type": "scene", "weight": 2 }, { "content": "Under the TV is a white cabinet with some sundries in it.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves down slightly and focuses on the center of her T-shirt. The woman points with her left index finger to a small black animal with wings on the T-shirt. Below the small animal is a brown animal with big eyes and a pair of big ears facing forward.", "visual_elements": [ { "content": "The camera moves slightly downward.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the center of her T-shirt.", "type": "camera", "weight": 3 }, { "content": "The woman points with her left index finger to a small black animal with wings on the T-shirt.", "type": "action", "weight": 2 }, { "content": "Under the small animal is a brown animal with big eyes and a pair of big ears facing forward.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0761", "video_path": "VIDGEN-1M/FHCNaYwnmmo-Scene-0001.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 6.24, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a colorful butterfly. The butterfly's wings are a bright orange and gold pattern with white spots on the edges. The butterfly is perched on a small purple flower with lavender petals and green leaves around it. The background is a blurry green and yellow with many small purple flowers.\nThe camera then moves to the right until the end of the video. The butterfly's wings continue to flutter slightly, and its antennae gently move. In the background, another larger butterfly is flapping its wings in the blur.", "events": [ { "event": "The video begins with the camera focusing on a colorful butterfly. The butterfly's wings are a bright orange and gold pattern with white spots on the edges. The butterfly is perched on a small purple flower with lavender petals and green leaves around it. 
The background is a blurry green and yellow with many small purple flowers.", "visual_elements": [ { "content": "The camera focuses on a colorful butterfly.", "type": "camera", "weight": 3 }, { "content": "The butterfly's wings have a bright orange and gold pattern.", "type": "attribute", "weight": 3 }, { "content": "There are white spots on the edges of the butterfly's wings.", "type": "attribute", "weight": 1 }, { "content": "The butterfly is resting on a small purple flower.", "type": "attribute", "weight": 3 }, { "content": "The flower's petals are lavender and surrounded by green leaves.", "type": "attribute", "weight": 2 }, { "content": "The background is blurred green and yellow.", "type": "scene", "weight": 3 }, { "content": "There are many identical small purple flowers in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then moves to the right until the end of the video. The butterfly's wings continue to flutter slightly, and its antennae gently move. In the background, another larger butterfly is flapping its wings in the blur.", "visual_elements": [ { "content": "The camera moves to the right until the end of the video.", "type": "camera", "weight": 3 }, { "content": "The butterfly's wings continue to flutter slightly.", "type": "action", "weight": 3 }, { "content": "The butterfly's antennae gently wave.", "type": "action", "weight": 1 }, { "content": "There is another large butterfly flapping its wings in the background.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0762", "video_path": "VIDGEN-1M/FMK-XtICE48-Scene-0048.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 7.97, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on the inside of a refrigerator with blue lighting. A person is holding a package of sausages in the left hand. The sausages are reddish-brown and wrapped in transparent packaging.\nThe camera moves back and shakes. There are labels on the packaging of the sausages, with text and pictures on them. Some beverage bottles and food bags can be seen in the background of the refrigerator.", "events": [ { "event": "At the beginning of the video, the camera is focused on the inside of a refrigerator with blue lighting. A person is holding a package of sausages in the left hand. The sausages are reddish-brown and wrapped in transparent packaging.", "visual_elements": [ { "content": "The camera is focused on the inside of a refrigerator.", "type": "camera", "weight": 3 }, { "content": "In the refrigerator is blue lighting.", "type": "scene", "weight": 1 }, { "content": "A person is holding a package of sausages in the left hand.", "type": "action", "weight": 3 }, { "content": "The sausages are reddish-brown.", "type": "attribute", "weight": 3 }, { "content": "The sausages are wrapped in transparent packaging.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera moves back and shakes. There are labels on the packaging of the sausages, with text and pictures on them. 
Some beverage bottles and food bags can be seen in the background of the refrigerator.", "visual_elements": [ { "content": "The camera moves back and shakes.", "type": "camera", "weight": 3 }, { "content": "There are labels on the packaging of the sausages.", "type": "attribute", "weight": 2 }, { "content": "The labels have text and pictures on them.", "type": "attribute", "weight": 2 }, { "content": "Some beverage bottles and food bags can be seen in the background of the refrigerator.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0763", "video_path": "VIDGEN-1M/FSRgcQz2wM4-Scene-0009.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 15.84, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera captures two teams of players playing rugby. One team is wearing red jerseys and white shorts, and the other team is wearing green jerseys and white shorts. In the picture, the green jersey player who gets the ball is besieged by the red jersey players. The green jersey player who gets the ball jumps over the opponent who is trying to grab the ball and falls on the grass. The camera moves with it, and other green jersey players use their bodies to protect the player who gets the ball. Then the green jersey player who falls to the ground passes the ball to teammate No. 21, who gets the ball and passes it to teammate No. 2 who is standing not far away.\nThe camera moves in the direction of the ball. After No. 2, the green jersey player wearing a red hat, lowers his body to receive the ball, he runs left and right to avoid the attack of the red jersey players. The camera moves to the left, and the picture shows that No. 2, the green jersey player, is still knocked down by the red jersey player. The green jersey players run up to help, and finally the ball is taken away by the red jersey player who is pressed down. The background is the green playing field. In addition to the two teams of players, there is also a referee wearing a purple top and black shorts.", "events": [ { "event": "At the beginning of the video, the camera captures two teams of players playing rugby. One team is wearing red jerseys and white shorts, and the other team is wearing green jerseys and white shorts. In the picture, the green jersey player who gets the ball is besieged by the red jersey players. The green jersey player who gets the ball jumps over the opponent who is trying to grab the ball and falls on the grass. The camera moves with it, and other green jersey players use their bodies to protect the player who gets the ball. Then the green jersey player who falls to the ground passes the ball to teammate No. 21, who gets the ball and passes it to teammate No. 
2 who is standing not far away.", "visual_elements": [ { "content": "The camera captures two teams of players playing rugby.", "type": "camera", "weight": 3 }, { "content": "One team is wearing red jerseys and white shorts.", "type": "attribute", "weight": 3 }, { "content": "The other team is wearing green jerseys and white shorts.", "type": "attribute", "weight": 3 }, { "content": "The green jersey player who gets the ball is attacked by the red jersey players.", "type": "action", "weight": 3 }, { "content": "The green jersey player who gets the ball jumps over the opponent who is trying to grab the ball and falls to the grass.", "type": "action", "weight": 3 }, { "content": "The camera moves with it.", "type": "camera", "weight": 3 }, { "content": "Other green jersey players use their bodies to protect the player who gets the ball.", "type": "attribute", "weight": 2 }, { "content": "Then the green jersey player who falls to the ground passes the ball to teammate No. 21.", "type": "action", "weight": 3 }, { "content": "The green jersey player No. 21 gets the ball and passes it to teammate No. 2 who is standing not far away.", "type": "action", "weight": 3 } ] }, { "event": "The camera moves in the direction of the ball. After No. 2, the green jersey player wearing a red hat, lowers his body to receive the ball, he runs left and right to avoid the attack of the red jersey players. The camera moves to the left, and the picture shows that No. 2, the green jersey player, is still knocked down by the red jersey player. The green jersey players run up to help, and finally the ball is taken away by the red jersey player who is pressed down. The background is the green playing field. In addition to the two teams of players, there is also a referee wearing a purple top and black shorts.", "visual_elements": [ { "content": "The camera moves with the direction of the ball.", "type": "camera", "weight": 3 }, { "content": "The green jersey player No. 2, wearing a red hat, lowers his body to receive the ball and runs left and right to avoid the attack of the red jersey players.", "type": "action", "weight": 3 }, { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The green jersey player No. 2 is still knocked down by the red jersey player.", "type": "action", "weight": 3 }, { "content": "The green jersey players run up to help.", "type": "action", "weight": 2 }, { "content": "In the end, the ball is taken away by the red jersey player who is pressed down.", "type": "action", "weight": 2 }, { "content": "The background is the green playing field.", "type": "scene", "weight": 3 }, { "content": "In addition to the two teams of players, there is also a referee wearing a purple shirt and black shorts.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0764", "video_path": "VIDGEN-1M/FVVnjf-7RoQ-Scene-0048.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 6.76, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an outdoor equestrian training arena surrounded by wooden fences, with dense trees beyond them. A person wearing a light-colored long-sleeved shirt, dark pants, and a black helmet rides on the back of a brown horse. 
Ahead of the horse is a row of set-up obstacles, consisting of several crossed wooden poles.\nThe camera pans to the left as the rider gently pulls on the reins, guiding the horse as it approaches the obstacles and jumps over them. The camera zooms out as the horse jumps over the second obstacle and continues to pan to the left. The horse clears all the obstacles ahead. Behind the fences in the background, there is a cluster of lush green shrubs.", "events": [ { "event": "At the beginning of the video, the camera focuses on an outdoor equestrian training arena surrounded by wooden fences, with dense trees beyond them. A person wearing a light-colored long-sleeved shirt, dark pants, and a black helmet rides on the back of a brown horse. Ahead of the horse is a row of set-up obstacles, consisting of several crossed wooden poles.", "visual_elements": [ { "content": "The camera focuses on an outdoor equestrian training arena.", "type": "camera", "weight": 3 }, { "content": "The arena is surrounded by wooden fences.", "type": "attribute", "weight": 2 }, { "content": "Dense trees are beyond the fences.", "type": "attribute", "weight": 2 }, { "content": "A person wearing a light-colored long-sleeved shirt, dark pants, and a black helmet rides on the back of a brown horse.", "type": "attribute", "weight": 3 }, { "content": "Ahead of the horse is a row of set-up obstacles.", "type": "attribute", "weight": 3 }, { "content": "The obstacles consist of several crossed wooden poles.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans to the left as the rider gently pulls on the reins, guiding the horse as it approaches the obstacles and jumps over them. The camera zooms out as the horse jumps over the second obstacle and continues to pan to the left. The horse clears all the obstacles ahead. Behind the fences in the background, there is a cluster of lush green shrubs.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The rider gently pulls on the reins.", "type": "action", "weight": 2 }, { "content": "The horse approaches the obstacles and jumps over them.", "type": "action", "weight": 3 }, { "content": "The camera zooms out as the horse jumps over the second obstacle.", "type": "camera", "weight": 3 }, { "content": "The camera continues to pan to the left.", "type": "camera", "weight": 3 }, { "content": "The horse clears all the obstacles ahead.", "type": "action", "weight": 3 }, { "content": "Behind the fences in the background, there is a cluster of lush green shrubs.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0765", "video_path": "VIDGEN-1M/Fby3xO-OZFY-Scene-0023.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 7.57, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a person dressed in red at the edge of the water, reaching both hands into the water to look for something, with a calm water surface in the background.\nThe camera moves down slightly as this left-handed person reaches into the water with his fingers and holds a small pair of scissors between the fingers of his right hand. 
The left hand fumbles in the water and the fingers of the right hand pinch a piece of brown seaweed in the water; the fingers of the left hand use the scissors to cut off a small piece of seaweed, which is then raised above the surface of the water with the left hand. The camera moves upward as the hand moves, the seaweed glistens in the sunlight, and drops of water drip off the seaweed.", "events": [ { "event": "The video begins with a person dressed in red at the edge of the water, reaching both hands into the water to look for something, with a calm water surface in the background.", "visual_elements": [ { "content": "The image is of a man in a red suit at the water's edge.", "type": "scene", "weight": 3 }, { "content": "Both hands reach into the water, looking for something.", "type": "action", "weight": 3 }, { "content": "There is a calm water surface in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves down slightly as this left-handed person reaches into the water with his fingers and holds a small pair of scissors between the fingers of his right hand. The left hand fumbles in the water and the fingers of the right hand pinch a piece of brown seaweed in the water; the fingers of the left hand use the scissors to cut off a small piece of seaweed, which is then raised above the surface of the water with the left hand. The camera moves upward as the hand moves, the seaweed glistens in the sunlight, and drops of water drip off the seaweed.", "visual_elements": [ { "content": "The camera moves down slightly.", "type": "camera", "weight": 3 }, { "content": "This left-handed person reaches into the water with his fingers.", "type": "action", "weight": 3 }, { "content": "This left-handed person holds a small pair of scissors between the fingers of his right hand.", "type": "action", "weight": 3 }, { "content": "The left hand fumbles in the water.", "type": "action", "weight": 3 }, { "content": "The fingers of the right hand pinch a piece of brown seaweed in the water.", "type": "action", "weight": 3 }, { "content": "The fingers of the left hand use the scissors to cut off a small piece of seaweed.", "type": "action", "weight": 3 }, { "content": "The seaweed is then lifted above the water with the left hand.", "type": "action", "weight": 3 }, { "content": "The camera moves upward as the hand moves.", "type": "camera", "weight": 3 }, { "content": "The seaweed glistens in the sunlight.", "type": "scene", "weight": 2 }, { "content": "Droplets of water drip off the seaweed.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0766", "video_path": "VIDGEN-1M/FernflGCAiI-Scene-0007.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 13.44, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the screen shows a basketball game. There are two teams on the court, one wearing white jerseys and the other wearing green jerseys. There are spectators and substitutes in the background. The venue is a light-colored basketball court with patterns. There are also billboards and team logos around the venue.\nThe camera keeps a long shot. A player in a green jersey is dribbling and moving forward quickly, breaking through the defense of the player in the white jersey. The camera moves to the right following his movements. There are many players running and defending on the court. 
Then the screen shows that the dribbling player No. 0 bounces the ball under the basket and passes it to another player No. 1 on the same team. The No. 1 player in the green jersey jumps up and hangs on the basketball hoop after making a successful shot. Then other players run away from the hoop, and the basketball is passed to the referee by the No. 10 player in the green jersey. There are referees, cheerleaders and photography teams in the background.", "events": [ { "event": "At the beginning of the video, the screen shows a basketball game. There are two teams on the court, one wearing white jerseys and the other wearing green jerseys. There are spectators and substitutes in the background. The venue is a light-colored basketball court with patterns. There are also billboards and team logos around the venue.", "visual_elements": [ { "content": "The screen shows a basketball game.", "type": "attribute", "weight": 3 }, { "content": "There are two teams on the court competing fiercely.", "type": "attribute", "weight": 3 }, { "content": "One team is wearing white jerseys and the other is wearing green jerseys.", "type": "attribute", "weight": 3 }, { "content": "There are spectators and substitutes in the background.", "type": "scene", "weight": 2 }, { "content": "The venue is a light-colored basketball court with a pattern.", "type": "scene", "weight": 2 }, { "content": "There are billboards and team logos around the court.", "type": "scene", "weight": 1 } ] }, { "event": "The camera keeps a long shot. A player in a green jersey is dribbling and moving forward quickly, breaking through the defense of the player in the white jersey. The camera moves to the right following his movements. There are many players running and defending on the court. Then the screen shows that the dribbling player No. 0 bounces the ball under the basket and passes it to another player No. 1 on the same team. The No. 1 player in the green jersey jumps up and hangs on the basketball hoop after making a successful shot. Then other players run away from the hoop, and the basketball is passed to the referee by the No. 10 player in the green jersey. There are referees, cheerleaders and photography teams in the background.", "visual_elements": [ { "content": "The camera keeps a long shot.", "type": "camera", "weight": 3 }, { "content": "A player in a green jersey is dribbling and moving forward quickly.", "type": "action", "weight": 3 }, { "content": "The player in the green jersey breaks through the defense of the player in the white jersey.", "type": "action", "weight": 3 }, { "content": "The camera moves to the right following the movement of the player in the green jersey.", "type": "camera", "weight": 3 }, { "content": "There are many other players running and defending on the court.", "type": "action", "weight": 2 }, { "content": "Then the screen shows that the dribbling player No. 0 bounces the ball under the basket and passes it to another player No. 1 on the same team.", "type": "action", "weight": 3 }, { "content": "The player No. 1 in the green jersey jumps up and hangs on the basketball hoop after making a successful shot.", "type": "action", "weight": 3 }, { "content": "Then other players run away from the hoop.", "type": "action", "weight": 2 }, { "content": "The basketball is passed to the referee by the player No. 
10 in the green jersey.", "type": "action", "weight": 2 }, { "content": "In the background, there are referees, cheerleaders and a photography team.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0767", "video_path": "VIDGEN-1M/FpKSqJuX1K0-Scene-0009.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 6.74, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a tumultuous sea with a few white clouds in the sky. In the center of the scene is a windsurfer dressed in a black tight-fitting suit, standing on a surfboard with both hands firmly gripping the sail boom. The sail features purple and white patterns.\nThe camera follows the windsurfer as they swiftly advance through the waves, causing the surfboard to create bursts of spray on the water's surface. The surfer takes off into the air, and the camera tilts slightly upwards to capture their mid-air action. The surfer loses their balance, with both their body and the sail leaning to the right, eventually plunging into the water and creating a splash. The sail and surfboard float on the water surface. In the background, other windsurfers and kite surfers can be seen in the distance, with kites flying high in the sky, waves rolling on the sea, and sunlight shimmering on the water.", "events": [ { "event": "At the beginning of the video, the camera focuses on a tumultuous sea with a few white clouds in the sky. In the center of the scene is a windsurfer dressed in a black tight-fitting suit, standing on a surfboard with both hands firmly gripping the sail boom. The sail features purple and white patterns.", "visual_elements": [ { "content": "The camera focuses on a tumultuous sea surface.", "type": "camera", "weight": 3 }, { "content": "There are a few white clouds in the sky.", "type": "scene", "weight": 2 }, { "content": "In the center of the scene is a windsurfer dressed in a black tight-fitting suit.", "type": "attribute", "weight": 3 }, { "content": "The windsurfer stands on a surfboard, and their hands are tightly gripping the sail pole.", "type": "attribute", "weight": 3 }, { "content": "The sail features purple and white patterns.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera follows the windsurfer as they swiftly advance through the waves, causing the surfboard to create bursts of spray on the water's surface. The surfer takes off into the air, and the camera tilts slightly upwards to capture their mid-air action. The surfer loses their balance, with both their body and the sail leaning to the right, eventually plunging into the water and creating a splash. The sail and surfboard float on the water surface. 
In the background, other windsurfers and kite surfers can be seen in the distance, with kites flying high in the sky, waves rolling on the sea, and sunlight shimmering on the water.", "visual_elements": [ { "content": "The camera follows the windsurfer as they swiftly advance.", "type": "camera", "weight": 3 }, { "content": "The windsurfer swiftly advances through the waves.", "type": "action", "weight": 3 }, { "content": "The surfboard creates bursts of spray on the water's surface.", "type": "attribute", "weight": 2 }, { "content": "The surfer takes off into the air.", "type": "action", "weight": 3 }, { "content": "The camera tilts up slightly to capture the surfer's action in mid-air.", "type": "camera", "weight": 3 }, { "content": "The surfer loses their balance.", "type": "attribute", "weight": 3 }, { "content": "Both the surfer's body and the sail lean to the right.", "type": "action", "weight": 3 }, { "content": "The surfer and the sail eventually plunge into the water, creating a splash.", "type": "action", "weight": 3 }, { "content": "The sail and surfboard float on the water's surface.", "type": "attribute", "weight": 2 }, { "content": "In the background, other windsurfers and kite surfers can be seen in the distance.", "type": "scene", "weight": 2 }, { "content": "Kites fly high in the sky.", "type": "scene", "weight": 2 }, { "content": "The waves roll across the sea.", "type": "scene", "weight": 2 }, { "content": "Sunlight shimmers on the water.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 18 }, { "index": "TUNA_0768", "video_path": "VIDGEN-1M/G0xVUbjiJqk-Scene-0150.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 8.04, "resolution": { "width": 1280, "height": 720 }, "caption": "The drone shot looks down at the entire large construction site from a high altitude. It is a drone perspective. On the left side of the picture is a flat open space with several yellow heavy machinery and equipment neatly arranged on it. On the right side of the picture is a complex dirt road system that twists and turns, forming multiple loops and turns. The site is surrounded by green farmland and some trees, and in the distance are hills and more vegetation. Several cars and yellow machinery are parked in the lower right of the picture.\nThen, the camera moves forward. A green farmland appears from the top of the screen, and on the left side of the screen is a small village surrounded by green plants.", "events": [ { "event": "The drone shot looks down at the entire large construction site from a high altitude. It is a drone perspective. On the left side of the picture is a flat open space with several yellow heavy machinery and equipment neatly arranged on it. On the right side of the picture is a complex dirt road system that twists and turns, forming multiple loops and turns. The site is surrounded by green farmland and some trees, and in the distance are hills and more vegetation. 
Several cars and yellow machinery are parked in the lower right of the picture.", "visual_elements": [ { "content": "The video was shot by a drone.", "type": "camera", "weight": 3 }, { "content": "The camera looks down at the entire large construction site from a high altitude.", "type": "camera", "weight": 3 }, { "content": "On the left side of the site is a flat open space with several yellow heavy machinery and equipment neatly arranged on it.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the screen is a winding dirt road system.", "type": "attribute", "weight": 2 }, { "content": "The site is surrounded by green farmland and some trees.", "type": "attribute", "weight": 1 }, { "content": "There are several cars and yellow machinery parked in the lower right of the screen.", "type": "attribute", "weight": 1 }, { "content": "In the distance are hills and more vegetation.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves forward. A green farmland appears from the top of the screen, and on the left side of the screen is a small village surrounded by green plants.", "visual_elements": [ { "content": "Then, the camera moves forward.", "type": "camera", "weight": 3 }, { "content": "A green farmland appears from the top of the screen.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the screen is a small village surrounded by greenery.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0769", "video_path": "VIDGEN-1M/GHyGi76-cFI-Scene-0032.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 7.44, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera is focused on a forest with moss and fallen leaves on the ground. In the frame, there is a young Caucasian man wearing a helmet, a gray short-sleeved T-shirt and black shorts. The man is riding a red and black mountain bike. The man is riding on a sloping path in the forest to the left of the frame. There is a shovel on the ground on the right side of the frame. The trees in the background are tall and lush.\nThen, the camera keeps a low angle and moves to the right and rear. The man rides to the dirt slope and leans forward, ready to jump. Then, the man leaves the seat to keep balance, the wheels leave the ground, and the bike jumps over the dirt slope.\nNext, the camera moves with the man's movements. The camera captures the man's rapid turns and downhill movements on the trail at close range. The wheels leave deep marks on the ground.\nAt the end of the video, the camera continues to follow the man's movements. The camera captures the man's back as he rides. The man rides to a relatively flat area. Passing trees and bushes, the dirt and fallen leaves on the ground are stirred up by the wheels.", "events": [ { "event": "The camera is focused on a forest with moss and fallen leaves on the ground. In the frame, there is a young Caucasian man wearing a helmet, a gray short-sleeved T-shirt and black shorts. The man is riding a red and black mountain bike. The man is riding on a sloping path in the forest to the left of the frame. There is a shovel on the ground on the right side of the frame. 
The trees in the background are tall and lush.", "visual_elements": [ { "content": "The camera is focused on a forest with moss and fallen leaves on the ground.", "type": "camera", "weight": 3 }, { "content": "There is a young Caucasian man wearing a helmet, a gray short-sleeved T-shirt and black shorts.", "type": "attribute", "weight": 3 }, { "content": "The man is riding a red and black mountain bike.", "type": "attribute", "weight": 2 }, { "content": "The man is riding his bike on a sloping path in the forest to the left of the picture.", "type": "action", "weight": 3 }, { "content": "There is a shovel on the ground on the right side of the picture.", "type": "scene", "weight": 2 }, { "content": "The trees in the background are tall and lush.", "type": "scene", "weight": 3 } ] }, { "event": "Then, the camera keeps a low angle and moves to the right and rear. The man rides to the dirt slope and leans forward, ready to jump. Then, the man leaves the seat to keep balance, the wheels leave the ground, and the bike jumps over the dirt slope.", "visual_elements": [ { "content": "Then, the camera keeps shooting at a low angle and moves to the right and rear.", "type": "camera", "weight": 3 }, { "content": "The man rides to the dirt slope and leans forward, ready to jump.", "type": "action", "weight": 2 }, { "content": "Then, the man leaves the seat to keep his balance.", "type": "action", "weight": 2 }, { "content": "Then, the wheels leave the ground and the bike jumps over the dirt slope.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera moves with the man's movements. The camera captures the man's rapid turns and downhill movements on the trail at close range. The wheels leave deep marks on the ground.", "visual_elements": [ { "content": "Next, the camera moves with the man's movements.", "type": "camera", "weight": 3 }, { "content": "The camera captures the man's movements at close range.", "type": "camera", "weight": 2 }, { "content": "The man skillfully turns and descends quickly on the trail.", "type": "action", "weight": 3 }, { "content": "The wheels leave deep marks on the ground.", "type": "attribute", "weight": 1 } ] }, { "event": "At the end of the video, the camera continues to follow the man's movements. The camera captures the man's back as he rides. The man rides to a relatively flat area. Passing trees and bushes, the dirt and fallen leaves on the ground are stirred up by the wheels.", "visual_elements": [ { "content": "At the end of the video, the camera follows the man and turns to the left and back.", "type": "camera", "weight": 3 }, { "content": "The camera captures the man's back as he rides.", "type": "camera", "weight": 2 }, { "content": "The man rides to a relatively flat area.", "type": "action", "weight": 3 }, { "content": "The dirt and fallen leaves on the ground are stirred up by the wheels as he passes through trees and bushes.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0770", "video_path": "VIDGEN-1M/GKeiH-jnr7k-Scene-0069.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 15.45, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is aimed at an open wilderness. There are two deer in the picture, one is eating grass with its head down in the lower right corner of the picture, and the other is near the edge of the woods in the center of the picture. 
The deer on the right side of the picture is light brown in color. The deer in the center of the picture has light gray fur and its head is held high. The background is a bare forest, and the ground is covered with withered branches and weeds.\nThen, the camera starts to move to the right and forward. The deer in the center of the frame turns its head and bites the branch on the left side of the frame. The deer in the lower right gradually disappears on the right side of the frame.\nFinally, the deer in the center of the picture has white spots on its body. The deer in the center of the picture is looking up and shaking its head while biting the branch above its head.", "events": [ { "event": "At the beginning of the video, the camera is aimed at an open wilderness. There are two deer in the picture, one is eating grass with its head down in the lower right corner of the picture, and the other is near the edge of the woods in the center of the picture. The deer on the right side of the picture is light brown in color. The deer in the center of the picture has light gray fur and its head is held high. The background is a bare forest, and the ground is covered with withered branches and weeds.", "visual_elements": [ { "content": "The camera is aimed at an open wilderness.", "type": "camera", "weight": 3 }, { "content": "There are two deer in the picture.", "type": "attribute", "weight": 3 }, { "content": "One is eating grass with its head down in the lower right corner of the picture.", "type": "action", "weight": 2 }, { "content": "The other is near the edge of the woods in the center of the picture.", "type": "attribute", "weight": 2 }, { "content": "The deer on the right side of the picture is light brown in color.", "type": "attribute", "weight": 2 }, { "content": "The deer in the center of the picture has a light gray fur color.", "type": "attribute", "weight": 2 }, { "content": "The deer in the center of the picture has its head held high.", "type": "action", "weight": 2 }, { "content": "The background is a bare forest.", "type": "scene", "weight": 2 }, { "content": "The ground is full of withered branches and weeds.", "type": "scene", "weight": 3 } ] }, { "event": "Then, the camera starts to move to the right and forward. The deer in the center of the frame turns its head and bites the branch on the left side of the frame. The deer in the lower right gradually disappears on the right side of the frame.", "visual_elements": [ { "content": "The camera starts to move to the right and moves forward.", "type": "camera", "weight": 3 }, { "content": "The deer in the center of the picture turns its head and bites the branch on the left side of the picture.", "type": "action", "weight": 2 }, { "content": "The deer in the lower right corner of the picture gradually disappears on the right side of the picture.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the deer in the center of the picture has white spots on its body. 
The deer in the center of the picture is looking up and shaking its head while biting the branch above its head.", "visual_elements": [ { "content": "Finally, the deer in the center of the picture has white spots on its body.", "type": "attribute", "weight": 1 }, { "content": "The deer in the center of the picture raises its head and shakes it to bite the branch above its head.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0771", "video_path": "VIDGEN-1M/GjJBwebCrO0-Scene-0013.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 8.98, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a yellow puppy, which is held in the arms of a woman wearing a gray short-sleeved T-shirt. The puppy has big and bright eyes and drooping ears. The background is inside a car, with blurred green trees and white sky outside the car window on the left side of the picture.\nThe camera is then positioned at a low angle and moves to the right, with the dog's body and the woman's face gradually entering the frame. The woman has black hair. The woman is sitting in the passenger seat of the car, wearing a seatbelt. The dog looks out the window on the left side of the frame. The car window gradually disappears to the left side of the frame.\nFinally, the camera rotates to the right, keeping the dog and the person sideways. The dog sometimes looks up at the camera, and sometimes looks down at the woman's hand. The woman gently strokes the dog's back and head. At the end of the video, the dog turns to look at the person in the driver's seat, and then lies back on the woman's arm. On the right side of the screen, a tattooed arm on the driver's seat enters the screen and disappears immediately.", "events": [ { "event": "At the beginning of the video, the camera is focused on a yellow puppy, which is held in the arms of a woman wearing a gray short-sleeved T-shirt. The puppy has big and bright eyes and drooping ears. The background is inside a car, with blurred green trees and white sky outside the car window on the left side of the picture.", "visual_elements": [ { "content": "The camera is pointed at a yellow puppy.", "type": "camera", "weight": 3 }, { "content": "The puppy is held in the arms of a woman in a gray short-sleeved T-shirt.", "type": "attribute", "weight": 3 }, { "content": "The puppy has big, bright eyes and drooping ears.", "type": "attribute", "weight": 1 }, { "content": "The background is inside a car.", "type": "scene", "weight": 3 }, { "content": "There are blurry green trees and white sky outside the car window on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "The camera is then positioned at a low angle and moves to the right, with the dog's body and the woman's face gradually entering the frame. The woman has black hair. The woman is sitting in the passenger seat of the car, wearing a seatbelt. The dog looks out the window on the left side of the frame. 
The car window gradually disappears to the left side of the frame.", "visual_elements": [ { "content": "The camera is placed at a low angle and gradually moves to the right.", "type": "camera", "weight": 3 }, { "content": "The dog's body and the woman's face gradually enter the screen.", "type": "attribute", "weight": 2 }, { "content": "The woman has black hair.", "type": "attribute", "weight": 2 }, { "content": "The woman sits in the passenger seat of the car, wearing a seat belt.", "type": "attribute", "weight": 2 }, { "content": "The dog looks out the window on the left side of the screen.", "type": "action", "weight": 2 }, { "content": "The car window gradually disappears to the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera rotates to the right, keeping the dog and the person sideways. The dog sometimes looks up at the camera, and sometimes looks down at the woman's hand. The woman gently strokes the dog's back and head. At the end of the video, the dog turns to look at the person in the driver's seat, and then lies back on the woman's arm. On the right side of the screen, a tattooed arm on the driver's seat enters the screen and disappears immediately.", "visual_elements": [ { "content": "Finally, the camera rotates to the right to shoot.", "type": "camera", "weight": 3 }, { "content": "The camera remains above the side of the person and the dog.", "type": "camera", "weight": 1 }, { "content": "The puppy sometimes looks up at the camera, and sometimes looks down at the woman's hand.", "type": "action", "weight": 2 }, { "content": "The woman gently strokes the puppy's back and head.", "type": "action", "weight": 2 }, { "content": "At the end of the video, the dog turns his head to look at the person in the driver's seat, and then lies back on the woman's arm.", "type": "action", "weight": 2 }, { "content": "A tattooed arm on the right side of the driver's seat enters the screen and disappears immediately.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0772", "video_path": "VIDGEN-1M/Gm5D4_sNt3c-Scene-0097.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 10.76, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a middle-aged male wearing a dark jacket and gray pants, holding three carrots, which are thin and long. In the lower left corner of the frame is a planting box with a plucked carrot on the surface of the soil and several un-pulled carrots in the box.\nThe camera gradually pulls in closer and the camera focuses on the carrot, which still has dirt and green leaves on it. The camera gradually pulls away, revealing only the ends of the carrots.", "events": [ { "event": "The video begins with the camera focusing on a middle-aged male wearing a dark jacket and gray pants, holding three carrots, which are thin and long. 
In the lower left corner of the frame is a planting box with a plucked carrot on the surface of the soil and several un-pulled carrots in the box.", "visual_elements": [ { "content": "The camera focuses on a middle-aged male wearing a dark jacket and gray pants.", "type": "camera", "weight": 3 }, { "content": "This person is holding three carrots.", "type": "action", "weight": 3 }, { "content": "The carrots are thin and long.", "type": "attribute", "weight": 3 }, { "content": "In the lower left corner of the frame is a planting box.", "type": "scene", "weight": 2 }, { "content": "A plucked carrot lies on the surface of the soil.", "type": "scene", "weight": 2 }, { "content": "Several un-pulled carrots are in the box.", "type": "scene", "weight": 1 } ] }, { "event": "The camera gradually pulls in closer and the camera focuses on the carrot, which still has dirt and green leaves on it. The camera gradually pulls away, revealing only the ends of the carrots.", "visual_elements": [ { "content": "The camera gradually pulls in closer.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the carrot.", "type": "camera", "weight": 3 }, { "content": "The carrots still have dirt and green leaves on them.", "type": "scene", "weight": 2 }, { "content": "The camera gradually pulls away.", "type": "camera", "weight": 3 }, { "content": "The camera reveals only the ends of the carrots.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0773", "video_path": "VIDGEN-1M/H9exvm3CM9E-Scene-0189.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 11.18, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a large sushi roll placed on a wooden chopping board. The outer layer of the sushi roll that has been cut by a knife is wrapped with black seaweed. Inside, white rice and various colored ingredients can be seen, including light yellow egg sheets, green cucumber strips, orange salmon slices, dark yellow pickled radishes, etc. The close-up shot of the camera shows the details of the sushi roll.\nThe camera zooms in, and a hand wearing white gloves appears in the frame, holding a silver knife with its blade cutting into the sushi roll.\nThe camera continues to zoom in while moving from right to left. As the blade is repeatedly pushed forward, the sushi roll is cut into several slices.", "events": [ { "event": "At the beginning of the video, the camera is focused on a large sushi roll placed on a wooden chopping board. The outer layer of the sushi roll that has been cut by a knife is wrapped with black seaweed. Inside, white rice and various colored ingredients can be seen, including light yellow egg sheets, green cucumber strips, orange salmon slices, dark yellow pickled radishes, etc. 
The close-up shot of the camera shows the details of the sushi roll.", "visual_elements": [ { "content": "The camera is focused on a large sushi roll placed on a wooden chopping board.", "type": "camera", "weight": 3 }, { "content": "The outer layer of the sushi roll that has been cut by a knife is wrapped with black seaweed.", "type": "attribute", "weight": 3 }, { "content": "Inside, white rice and various colored ingredients can be seen.", "type": "attribute", "weight": 3 }, { "content": "The ingredients include light yellow egg sheets, green cucumber strips, orange salmon slices, dark yellow pickled radishes, etc.", "type": "attribute", "weight": 2 }, { "content": "The close-up shot of the camera shows the details of the sushi roll.", "type": "camera", "weight": 3 } ] }, { "event": "The camera zooms in, and a hand wearing white gloves appears in the frame, holding a silver knife with its blade cutting into the sushi roll.", "visual_elements": [ { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "A hand wearing white gloves appears in the frame.", "type": "attribute", "weight": 1 }, { "content": "The hand holds a silver knife with its blade cutting into the sushi roll.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to zoom in while moving from right to left. As the blade is repeatedly pushed forward, the sushi roll is cut into several slices.", "visual_elements": [ { "content": "The camera continues to zoom in while moving from right to left.", "type": "camera", "weight": 3 }, { "content": "As the blade is repeatedly pushed forward, the sushi roll is cut into several slices.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 10 }, { "index": "TUNA_0774", "video_path": "VIDGEN-1M/HKFQhAqxnmg-Scene-0008.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 7.84, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a white pickup truck equipped with a large snowplow. The headlights of the car are on as it works in the snow at night. The body of the car is covered with a thick layer of snow, and the tires are also covered with snow and mud. In the background is a dimly lit snowy street.\nThe camera focuses on the truck and follows it. The truck drives to the right of the screen and then turns left to a wall. There is warm light on the wall.", "events": [ { "event": "At the beginning of the video, the camera is focused on a white pickup truck equipped with a large snowplow. The headlights of the car are on as it works in the snow at night. The body of the car is covered with a thick layer of snow, and the tires are also covered with snow and mud. In the background is a dimly lit snowy street.", "visual_elements": [ { "content": "The camera is focused on a white pickup truck equipped with a large snowplow.", "type": "camera", "weight": 3 }, { "content": "The headlights of the car are on.", "type": "attribute", "weight": 1 }, { "content": "The car is working in the snow at night.", "type": "action", "weight": 2 }, { "content": "The body of the car is covered with a thick layer of snow, and the tires are also covered with snow and mud.", "type": "attribute", "weight": 1 }, { "content": "The background is a dim snowy street.", "type": "scene", "weight": 3 } ] }, { "event": "The camera focuses on the truck and follows it. 
The truck drives to the right of the screen and then turns left to a wall. There is warm light on the wall.", "visual_elements": [ { "content": "The camera focuses on the truck and follows the truck.", "type": "camera", "weight": 3 }, { "content": "The truck drives to the right of the screen and turns left to a wall.", "type": "action", "weight": 3 }, { "content": "There is a warm light on the wall.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0775", "video_path": "VIDGEN-1M/I3KuLmAW8ZI-Scene-0082.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 23.44, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on two small white mice in a green cage. The mice have red eyes and white fur. The background is the green wire mesh of the cage and some hay. The mouse on the left side of the screen is chewing hay on the ground. The mouse on the right side of the screen moves in the foreground, blurring the view.\nThen, the camera moves slightly to the right, focusing on the mouse on the left side of the screen. The mouse on the left side of the screen picks up a black object and chews it. The mouse on the right side of the screen moves around and blocks the camera and then runs out of the screen to the right side of the screen.\nFinally, the camera follows the mouse up and down. There is only one mouse left in the picture. After putting down the black object, the mouse stands up and looks around, and finally crawls to the right side of the screen, finds a yellow object, and lies on it to chew it.", "events": [ { "event": "The video begins with the camera focusing on two small white mice in a green cage. The mice have red eyes and white fur. The background is the green wire mesh of the cage and some hay. The mouse on the left side of the screen is chewing hay on the ground. The mouse on the right side of the screen moves in the foreground, blurring the view.", "visual_elements": [ { "content": "The camera is focused on two white mice in a green cage.", "type": "camera", "weight": 3 }, { "content": "The mice have red eyes and white fur.", "type": "attribute", "weight": 2 }, { "content": "The background is the green wire mesh of the cage and some hay.", "type": "scene", "weight": 3 }, { "content": "The mouse on the left side of the screen is chewing hay on the ground.", "type": "action", "weight": 2 }, { "content": "The mouse on the right side of the screen moves in the foreground, blurring the view.", "type": "action", "weight": 2 } ] }, { "event": "Then, the camera moves slightly to the right, focusing on the mouse on the left side of the screen. The mouse on the left side of the screen picks up a black object and chews it. The mouse on the right side of the screen moves around and blocks the camera and then runs out of the screen to the right side of the screen.", "visual_elements": [ { "content": "Then, the camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the mouse on the left side of the screen.", "type": "camera", "weight": 3 }, { "content": "The mouse on the left side of the screen picks up a black object and chews it.", "type": "action", "weight": 3 }, { "content": "The mouse on the right side of the screen moves around and blocks the camera, then runs out of the screen to the right.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera follows the mouse up and down. 
There is only one mouse left in the picture. After putting down the black object, the mouse stands up and looks around, and finally crawls to the right side of the screen, finds a yellow object, and lies on it to chew it.", "visual_elements": [ { "content": "Finally, the camera moves up and down with the mouse.", "type": "camera", "weight": 3 }, { "content": "There is only one mouse left in the picture.", "type": "attribute", "weight": 3 }, { "content": "The mouse puts down the black object, stands up and looks around, and finally crawls to the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The mouse finds a yellow object and lies on it to chew it.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0776", "video_path": "VIDGEN-1M/I9PNVVELshY-Scene-0128.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 5.67, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a young white man with short dark brown hair, wearing a black short-sleeved shirt and sunglasses, standing on a street in a residential area. There are several independent houses in the background, with lawns and bare trees in front of the houses. There is a dark brown convertible sports car parked on the street, with a closed roof, smooth body lines and black tires. There is a red SUV parked on the street on the left side of the screen. There is a dark blue car parked in front of the house on the right side of the screen.\nThe camera shakes left and right. The man keeps turning his body in the video, sometimes facing the camera, sometimes turning sideways, and sometimes making some gestures with his hands. The camera always keeps a close-up shot.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young white man with short dark brown hair, wearing a black short-sleeved shirt and sunglasses, standing on a street in a residential area. There are several independent houses in the background, with lawns and bare trees in front of the houses. There is a dark brown convertible sports car parked on the street, with a closed roof, smooth body lines and black tires. There is a red SUV parked on the street on the left side of the screen. There is a dark blue car parked in front of the house on the right side of the screen.", "visual_elements": [ { "content": "The camera is pointed at a young white male.", "type": "camera", "weight": 3 }, { "content": "The man has short dark brown hair, wears a black short-sleeved shirt and sunglasses.", "type": "attribute", "weight": 2 }, { "content": "The man is standing on a street in a residential area.", "type": "scene", "weight": 3 }, { "content": "There are several independent houses in the background.", "type": "scene", "weight": 2 }, { "content": "There are lawns and bare trees in front of the houses.", "type": "scene", "weight": 1 }, { "content": "A dark brown convertible sports car is parked on the street.", "type": "attribute", "weight": 2 }, { "content": "The roof of the sports car is closed, the body lines are smooth, and the tires are black.", "type": "attribute", "weight": 2 }, { "content": "A red SUV is parked on the street on the left side of the screen.", "type": "scene", "weight": 1 }, { "content": "A dark blue car is parked in front of the house on the right side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "The camera shakes left and right. 
The man keeps turning his body in the video, sometimes facing the camera, sometimes turning sideways, and sometimes making some gestures with his hands. The camera always keeps a close-up shot.", "visual_elements": [ { "content": "The camera shakes left and right.", "type": "camera", "weight": 3 }, { "content": "The man keeps turning his body in the video, sometimes facing the camera, sometimes sideways, and sometimes making some gestures with his hands.", "type": "action", "weight": 3 }, { "content": "The camera always keeps a close-up shot.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0777", "video_path": "VIDGEN-1M/IpNz6mmmitk-Scene-0035.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 5.64, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera is focused on a goal of a soccer field. There is a young male goalkeeper in front of the goal. The goalkeeper is wearing a red long-sleeved sportswear, black trousers and red gloves. The goalkeeper stands in front of the goal and successfully knocks back a white ball. The soccer field is surrounded by high metal railings and lush trees outside. There are some billboards hanging on the fence. There are wind turbines on the hills in the distance. The weather is clear, with a blue sky and white clouds in the background.\nNext, the camera switches to a young male player. The male player enters from the left side of the frame wearing a black short-sleeved shirt and shorts with yellow stripes. The player wears black socks and black football shoes. The player's jersey has the number \"13\" printed in yellow on the back. The player runs up and kicks an orange football with his right foot, which flies to the upper left corner of the goal. The goalkeeper quickly dives to the left, but the ball flies over his arms and into the goal. The camera moves forward slightly as the ball approaches the goal.\nFinally, the camera switches to a male player with a \"7\" printed on his back. The \"7\" player is wearing the same clothes as the \"13\" player. The \"7\" player runs into the camera from the left side of the screen and kicks a white football with his right foot. The football flies to the upper left corner of the goal. The goalkeeper quickly pounces to the left, but the football passes over his arm and flies into the goal.", "events": [ { "event": "The camera is focused on a goal of a soccer field. There is a young male goalkeeper in front of the goal. The goalkeeper is wearing a red long-sleeved sportswear, black trousers and red gloves. The goalkeeper stands in front of the goal and successfully knocks back a white ball. The soccer field is surrounded by high metal railings and lush trees outside. There are some billboards hanging on the fence. There are wind turbines on the hills in the distance. 
The weather is clear, with a blue sky and white clouds in the background.", "visual_elements": [ { "content": "The camera is focused on the goal of a football field.", "type": "camera", "weight": 3 }, { "content": "There is a young male goalkeeper in front of the goal.", "type": "attribute", "weight": 3 }, { "content": "The goalkeeper is wearing a red long-sleeved sportswear, black trousers and red gloves.", "type": "attribute", "weight": 3 }, { "content": "The goalkeeper stands in front of the goal and successfully knocks back a white ball.", "type": "action", "weight": 3 }, { "content": "The football field is surrounded by high metal railings and lush trees outside.", "type": "scene", "weight": 3 }, { "content": "There are some billboards hanging on the fence.", "type": "scene", "weight": 2 }, { "content": "There are wind turbines on the hills in the distance.", "type": "scene", "weight": 1 }, { "content": "The weather is clear and there is a blue sky and white clouds in the background.", "type": "scene", "weight": 1 } ] }, { "event": "Next, the camera switches to a young male player. The male player enters from the left side of the frame wearing a black short-sleeved shirt and shorts with yellow stripes. The player wears black socks and black football shoes. The player's jersey has the number \"13\" printed in yellow on the back. The player runs up and kicks an orange football with his right foot, which flies to the upper left corner of the goal. The goalkeeper quickly dives to the left, but the ball flies over his arms and into the goal. The camera moves forward slightly as the ball approaches the goal.", "visual_elements": [ { "content": "Next, the camera switches to a young male player.", "type": "camera", "weight": 3 }, { "content": "The male player enters from the left side of the frame wearing a black short-sleeved shirt and shorts with yellow stripes.", "type": "attribute", "weight": 3 }, { "content": "The player wears black socks and black football shoes.", "type": "attribute", "weight": 2 }, { "content": "The player's jersey has the number \"13\" printed in yellow on the back.", "type": "attribute", "weight": 1 }, { "content": "The player runs up and kicks an orange football with his right foot, which flies to the upper left corner of the goal.", "type": "action", "weight": 3 }, { "content": "The goalkeeper quickly dives to the left, but the ball flies over his arms and into the goal.", "type": "action", "weight": 2 }, { "content": "The camera moves forward slightly as the ball approaches the goal.", "type": "camera", "weight": 2 } ] }, { "event": "Finally, the camera switches to a male player with a \"7\" printed on his back. The \"7\" player is wearing the same clothes as the \"13\" player. The \"7\" player runs into the camera from the left side of the screen and kicks a white football with his right foot. The football flies to the upper left corner of the goal. 
The goalkeeper quickly pounces to the left, but the football passes over his arm and flies into the goal.", "visual_elements": [ { "content": "Finally, the camera switches to a male player with a \"7\" printed on his back.", "type": "camera", "weight": 3 }, { "content": "The \"7\" player is wearing the same clothes as the \"13\" player.", "type": "attribute", "weight": 3 }, { "content": "The \"7\" player runs into the camera from the left side of the screen and kicks a white football with his right foot.", "type": "action", "weight": 3 }, { "content": "The football flies to the upper left corner of the goal.", "type": "action", "weight": 2 }, { "content": "The goalkeeper quickly pounces to the left, but the football passes over his arm and flies into the goal.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0778", "video_path": "VIDGEN-1M/J48kyZfq50M-Scene-0121.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 9.61, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera pointing at a construction site, with a thin layer of dirt covering the ground. In the center of the frame are two wooden pallets with some gray bricks stacked on them. On the left, near the edge of the frame, is a higher layer of dirt. A blue tarp covers the dirt, with several wooden boards standing next to it. On the right, there is a row of neatly stacked gray bricks, next to a roll of black mesh material. There are mottled light and shadows on the ground.\nThe camera moves forward and a hand appears on the left side of the screen. The camera gradually focuses on the gray roller in the center of the screen. The roller is a bit damaged and is located in a depression with some dirt and gravel in front. The black mesh material extends to the side of the roller. From time to time, a hand appears on the left side of the camera and points at the roller.", "events": [ { "event": "The video begins with the camera pointing at a construction site, with a thin layer of dirt covering the ground. In the center of the frame are two wooden pallets with some gray bricks stacked on them. On the left, near the edge of the frame, is a higher layer of dirt. A blue tarp covers the dirt, with several wooden boards standing next to it. On the right, there is a row of neatly stacked gray bricks, next to a roll of black mesh material. 
There are mottled light and shadows on the ground.", "visual_elements": [ { "content": "The camera is pointed at a construction site.", "type": "camera", "weight": 3 }, { "content": "The ground is covered with a thin layer of soil.", "type": "attribute", "weight": 1 }, { "content": "There are two wooden pallets in the center of the screen, with some gray bricks piled on them.", "type": "attribute", "weight": 2 }, { "content": "There is a higher layer of soil on the left side near the edge of the screen.", "type": "attribute", "weight": 2 }, { "content": "The soil is covered with a blue waterproof cloth, and there are several wooden boards standing next to it.", "type": "attribute", "weight": 1 }, { "content": "There is a row of neatly stacked gray bricks on the right side.", "type": "attribute", "weight": 2 }, { "content": "There is a roll of black mesh material next to the bricks.", "type": "attribute", "weight": 1 }, { "content": "There are mottled light and shadows on the ground.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves forward and a hand appears on the left side of the screen. The camera gradually focuses on the gray roller in the center of the screen. The roller is a bit damaged and is located in a depression with some dirt and gravel in front. The black mesh material extends to the side of the roller. From time to time, a hand appears on the left side of the camera and points at the roller.", "visual_elements": [ { "content": "The camera moves forward.", "type": "camera", "weight": 3 }, { "content": "A hand appears on the left side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The camera gradually focuses on the gray roller in the center of the screen.", "type": "camera", "weight": 3 }, { "content": "The roller is a little damaged and is in a depression.", "type": "attribute", "weight": 2 }, { "content": "There is some soil and gravel in front of the roller.", "type": "attribute", "weight": 1 }, { "content": "The black mesh material extends to the side of the roller.", "type": "attribute", "weight": 1 }, { "content": "The hand on the left side of the camera appears from time to time, pointing at the roller and making gestures.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0779", "video_path": "VIDGEN-1M/JlH-Q7BHzDo-Scene-0013.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Animals & Pets", "duration": 12.93, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a cave in the sandy ground. A pink crustacean emerges from the cave, revealing its long tentacles and part of its body. The animal has a fine grainy texture on its surface. The background is sand, which is light yellow with some small black particles scattered around the surface.\nThen, the camera zooms in on the crustacean's head and moves slightly to the right. The animal's eyes and mouthparts are clearly visible, with some fine yellow hairs on the mouthparts. The background is irregular caves in the light yellow sand. The black particles in the background disappear.\nThen, the camera switches angles again, focusing on the forelimbs of the crustacean, and the camera moves slightly to the lower right. The animal's forelimbs are covered with tiny spines, and its tentacles are very long.", "events": [ { "event": "The video begins with the camera focusing on a cave in the sandy ground. 
A pink crustacean emerges from the cave, revealing its long tentacles and part of its body. The animal has a fine grainy texture on its surface. The background is sand, which is light yellow with some small black particles scattered around the surface.", "visual_elements": [ { "content": "The video begins with the camera focusing on a cave in the sandy ground.", "type": "camera", "weight": 3 }, { "content": "A pink crustacean emerges from the cave, revealing its long tentacles and part of its body.", "type": "attribute", "weight": 3 }, { "content": "The animal has a fine grainy texture on its surface.", "type": "attribute", "weight": 2 }, { "content": "The background is sand.", "type": "scene", "weight": 3 }, { "content": "The sand is a light yellow.", "type": "scene", "weight": 1 }, { "content": "There are some small black particles scattered on the surface around the cave.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera zooms in on the crustacean's head and moves slightly to the right. The animal's eyes and mouthparts are clearly visible, with some fine yellow hairs on the mouthparts. The background is irregular caves in the light yellow sand. The black particles in the background disappear.", "visual_elements": [ { "content": "The camera zooms in on the crustacean's head.", "type": "camera", "weight": 3 }, { "content": "The camera is slightly panned to the right.", "type": "camera", "weight": 2 }, { "content": "The animal's eyes and mouthparts are clearly visible.", "type": "attribute", "weight": 2 }, { "content": "There are some fine yellow hairs on the mouthparts.", "type": "attribute", "weight": 1 }, { "content": "The background is irregular caves on the light yellow sand.", "type": "scene", "weight": 2 }, { "content": "The black particles in the background disappear.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches angles again, focusing on the forelimbs of the crustacean, and the camera moves slightly to the lower right. The animal's forelimbs are covered with tiny spines, and its tentacles are very long.", "visual_elements": [ { "content": "The camera moves slightly to the lower right.", "type": "camera", "weight": 3 }, { "content": "The camera shows a close-up of the crustacean's forelimbs.", "type": "camera", "weight": 3 }, { "content": "The animal's forelimbs are covered with fine spines.", "type": "attribute", "weight": 2 }, { "content": "The animal's tentacles are very long.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0780", "video_path": "VIDGEN-1M/KZcLTSSRkAc-Scene-0096.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 4.54, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a blurry frame as the camera focuses on a display stand with two square aquariums on top of it. There is a dark curtain behind the stand, and a man dressed in gray stands next to the display case, with part of his body entering the frame.\nThe camera slowly moves to the right and closes in, and the entire aquarium is displayed in the center of the frame. The aquarium is covered with green water plants and some rocks. 
On the right side of the aquarium is a white labeled sign with some text information.", "events": [ { "event": "The video begins with a blurry frame as the camera focuses on a display stand with two square aquariums on top of it. There is a dark curtain behind the stand, and a man dressed in gray stands next to the display case, with part of his body entering the frame.", "visual_elements": [ { "content": "The picture is blurry.", "type": "scene", "weight": 2 }, { "content": "The camera focuses on a display stand.", "type": "camera", "weight": 3 }, { "content": "Two square aquariums sit on top of the stand.", "type": "scene", "weight": 2 }, { "content": "There is a dark curtain behind the stand.", "type": "scene", "weight": 2 }, { "content": "A man dressed in gray stands next to the display case.", "type": "scene", "weight": 2 }, { "content": "Part of his body enters the frame.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera slowly moves to the right and closes in, and the entire aquarium is displayed in the center of the frame. The aquarium is covered with green water plants and some rocks. On the right side of the aquarium is a white labeled sign with some text information.", "visual_elements": [ { "content": "The camera slowly moves to the right and closes in.", "type": "camera", "weight": 3 }, { "content": "The entire aquarium is displayed in the center of the frame.", "type": "scene", "weight": 3 }, { "content": "The aquarium is covered with green water plants and some rocks.", "type": "attribute", "weight": 3 }, { "content": "On the right side of the aquarium is a white labeled sign.", "type": "attribute", "weight": 2 }, { "content": "The sign has some text information on it.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0781", "video_path": "VIDGEN-1M/Ks12H-ufYAU-Scene-0007.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 12.11, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera is focused on a batter in a baseball game. The batter is wearing a red jersey with white numbers, gray pants with red stripes, and a black helmet. The batter is holding a bat, ready to hit the ball. The catcher next to him is wearing a white jersey and black protective gear with red stripes. The catcher is wearing a black baseball glove on his left hand and crouching to the right of the batter. The umpire is wearing a blue umpire uniform, gray pants, and black protective gear. The umpire is standing behind the catcher. The background is green grass and a brown infield area with white lines.\nThe camera moves to the right with the batter. The batter swings his bat and hits the flying baseball. After the ball is hit, the batter quickly runs to the right of the screen, running towards first base.\nThen the camera switches to the outfielder. The camera is in long shot, moving slightly forward. The outfielder is wearing a cap and a white jersey. The outfielder is running quickly to the right of the frame. Then the outfielder bends down to catch the ball.\nThe camera switches back to the batter. The camera continues to move to the right, following the batter's movements. The batter continues to run along the base line, passing first base and sprinting towards second base.\nFinally, the camera moves forward slightly. The batter slows down and stops on the second base bag with his back to the camera. Another teammate wearing a red jersey stands near second base. 
The batter is player number \"31\". The teammate is player number \"24\". The video ends with the batter and teammate face to face near second base.", "events": [ { "event": "The camera is focused on a batter in a baseball game. The batter is wearing a red jersey with white numbers, gray pants with red stripes, and a black helmet. The batter is holding a bat, ready to hit the ball. The catcher next to him is wearing a white jersey and black protective gear with red stripes. The catcher is wearing a black baseball glove on his left hand and crouching to the right of the batter. The umpire is wearing a blue umpire uniform, gray pants, and black protective gear. The umpire is standing behind the catcher. The background is green grass and a brown infield area with white lines.", "visual_elements": [ { "content": "The camera is focused on a batter in a baseball game.", "type": "camera", "weight": 3 }, { "content": "The batter is wearing a red jersey with white numbers, gray pants with red stripes, and a black helmet.", "type": "attribute", "weight": 3 }, { "content": "The batter is holding a bat, ready to hit the ball.", "type": "action", "weight": 3 }, { "content": "The catcher next to the batter is wearing a white jersey and black protective gear with red stripes.", "type": "attribute", "weight": 2 }, { "content": "The catcher is wearing a black baseball glove on his left hand and crouching to the right of the batter.", "type": "action", "weight": 2 }, { "content": "The umpire is wearing a blue umpire uniform, gray pants, and black protective gear.", "type": "attribute", "weight": 1 }, { "content": "The umpire is standing behind the catcher.", "type": "action", "weight": 1 }, { "content": "The background is green grass and a brown infield area with white lines.", "type": "scene", "weight": 3 } ] }, { "event": "The camera moves to the right with the batter. The batter swings his bat and hits the flying baseball. After the ball is hit, the batter quickly runs to the right of the screen, running towards first base.", "visual_elements": [ { "content": "The camera moves to the right with the batter.", "type": "camera", "weight": 3 }, { "content": "The batter swings his bat and hits the flying baseball.", "type": "action", "weight": 3 }, { "content": "After the ball is hit, the batter quickly runs to the right of the screen, running towards first base.", "type": "action", "weight": 3 } ] }, { "event": "Then the camera switches to the outfielder. The camera is in long shot, moving slightly forward. The outfielder is wearing a cap and a white jersey. The outfielder is running quickly to the right of the frame. Then the outfielder bends down to catch the ball.", "visual_elements": [ { "content": "Then the camera switches to the outfielder.", "type": "camera", "weight": 3 }, { "content": "The camera is in long shot and moves slightly forward.", "type": "camera", "weight": 3 }, { "content": "The outfielder is wearing a cap and a white jersey.", "type": "attribute", "weight": 2 }, { "content": "The outfielder is running quickly to the right of the frame.", "type": "action", "weight": 3 }, { "content": "Then the outfielder bends down to catch the ball.", "type": "action", "weight": 3 } ] }, { "event": "The camera switches back to the batter. The camera continues to move to the right, following the batter's movements. 
The batter continues to run along the base line, passing first base and sprinting towards second base.", "visual_elements": [ { "content": "The camera switches back to the batter.", "type": "camera", "weight": 3 }, { "content": "The camera continues to move to the right, following the batter's movements.", "type": "camera", "weight": 3 }, { "content": "The batter continues to run along the base line, passing first base and sprinting towards second base.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera moves forward slightly. The batter slows down and stops on the second base bag with his back to the camera. Another teammate wearing a red jersey stands near second base. The batter is player number \"31\". The teammate is player number \"24\". The video ends with the batter and teammate face to face near second base.", "visual_elements": [ { "content": "Finally, the camera moves forward slightly.", "type": "action", "weight": 2 }, { "content": "The batter slows down and stops on the second base bag with his back to the camera.", "type": "action", "weight": 3 }, { "content": "Another teammate wearing a red jersey stands near second base.", "type": "attribute", "weight": 2 }, { "content": "The batter is player number \"31\".", "type": "attribute", "weight": 1 }, { "content": "The teammate is player number \"24\".", "type": "attribute", "weight": 1 }, { "content": "The video ends with the batter and teammate face to face near second base.", "type": "action", "weight": 1 } ] } ], "n_events": 5, "n_elements": 25 }, { "index": "TUNA_0782", "video_path": "VIDGEN-1M/KuxbDReTbJg-Scene-0052.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 11.41, "resolution": { "width": 1280, "height": 720 }, "caption": "The video starts out blurry, with the camera focused on a giant sunfish. The sunfish's body is oval in shape, and its color is mainly grayish white. The background is dark green, and there are faintly some small fish in the distance.\nThe camera suddenly moves to the left and the scene becomes clear, revealing the fish's lower body.\nThe camera moves from bottom to top, gradually revealing the entire body of the fish. The camera focuses mainly on the side and head of the sunfish, showing its texture. The surface of the fish has some mottled yellow and brown spots. The fins are small, located on both sides of the body, fan-shaped, and darker in color. The fish's eyes are large and round, with a brown horn on top.", "events": [ { "event": "The video starts out blurry, with the camera focused on a giant sunfish. The sunfish's body is oval in shape, and its color is mainly grayish white. 
The background is dark green, and there are faintly some small fish in the distance.", "visual_elements": [ { "content": "The video starts out blurry.", "type": "camera", "weight": 3 }, { "content": "The camera is focused on a giant sunfish.", "type": "camera", "weight": 3 }, { "content": "The sunfish's body is oval in shape and mainly grayish white in color.", "type": "attribute", "weight": 2 }, { "content": "The background is dark green.", "type": "scene", "weight": 3 }, { "content": "There are some small fish in the distance.", "type": "scene", "weight": 1 } ] }, { "event": "The camera suddenly moves to the left and the scene becomes clear, revealing the fish's lower body.", "visual_elements": [ { "content": "The camera suddenly moves to the left and becomes clear.", "type": "camera", "weight": 3 }, { "content": "The fish's lower body is revealed.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves from bottom to top, gradually revealing the entire body of the fish. The camera focuses mainly on the side and head of the sunfish, showing its texture. The surface of the fish has some mottled yellow and brown spots. The fins are small, located on both sides of the body, fan-shaped, and darker in color. The fish's eyes are large and round, with a brown horn on top.", "visual_elements": [ { "content": "The camera moves from bottom to top.", "type": "camera", "weight": 3 }, { "content": "The fish's entire body gradually reveals itself.", "type": "attribute", "weight": 3 }, { "content": "The camera focuses mainly on the sides and head of the sunfish.", "type": "camera", "weight": 3 }, { "content": "The fish has some mottled yellow and brown spots on its surface.", "type": "attribute", "weight": 1 }, { "content": "The fins are small, located on both sides of the body, fan-shaped, and darker in color.", "type": "attribute", "weight": 2 }, { "content": "The fish's eyes are large and round, with a brown horn on top.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0783", "video_path": "VIDGEN-1M/Kx90cREXUqs-Scene-0014.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 7.44, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on the interior of a car, with the screen primarily showing the driver's and passenger's seat areas. The steering wheel is located in the center of the frame, and the front dashboard has two round dials surrounded by silver-gray trim. The center console is overall light colored with a knob and several buttons. The center console has brown woodgrain trim.\nThe camera slowly moves to the right and the screen shows details of the driver's and passenger's seats. Both the passenger seat and the driver's seat are made of brown and black leather with visible stitching on the seats. The wood grain trim on the inside of the doors and the silver door handles are also clearly visible.\nThe camera then moves up to show the gray cushions on the driver's and passenger's seats, with white stone walls, trees and distant buildings visible in the background.", "events": [ { "event": "The video begins with the camera focusing on the interior of a car, with the screen primarily showing the driver's and passenger's seat areas. The steering wheel is located in the center of the frame, and the front dashboard has two round dials surrounded by silver-gray trim. 
The center console is overall light colored with a knob and several buttons. The center console has brown woodgrain trim.", "visual_elements": [ { "content": "The camera focuses on the interior of a car.", "type": "camera", "weight": 3 }, { "content": "The screen mainly shows the driver's and passenger's seat areas.", "type": "attribute", "weight": 3 }, { "content": "The steering wheel is in the center of the frame.", "type": "attribute", "weight": 3 }, { "content": "There are two round dials on the front dashboard.", "type": "attribute", "weight": 2 }, { "content": "The dials are surrounded by silver-gray trim.", "type": "attribute", "weight": 1 }, { "content": "The center console is light-colored.", "type": "attribute", "weight": 2 }, { "content": "There is a knob and a few buttons on the center console.", "type": "attribute", "weight": 2 }, { "content": "The center console has brown wood grain accents.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera slowly moves to the right and the screen shows details of the driver's and passenger's seats. Both the passenger seat and the driver's seat are made of brown and black leather with visible stitching on the seats. The wood grain trim on the inside of the doors and the silver door handles are also clearly visible.", "visual_elements": [ { "content": "The camera slowly moves to the right.", "type": "camera", "weight": 3 }, { "content": "The scene shows details of the driver's and passenger's seats.", "type": "attribute", "weight": 2 }, { "content": "Both the passenger seat and the driver's seat are finished in brown and black leather.", "type": "attribute", "weight": 3 }, { "content": "There is visible stitching on the seats.", "type": "attribute", "weight": 2 }, { "content": "Woodgrain trim and silver door handles are also visible on the inside of the doors.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera then moves up to show the gray cushions on the driver's and passenger's seats, with white stone walls, trees and distant buildings visible in the background.", "visual_elements": [ { "content": "The camera moves up.", "type": "camera", "weight": 3 }, { "content": "Showing the gray cushions on the driver's and passenger's seats.", "type": "attribute", "weight": 2 }, { "content": "White stone walls, trees and distant buildings can be seen in the background.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0784", "video_path": "VIDGEN-1M/LWdEdLIh3wE-Scene-0004.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 12.26, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera is focused on a man's legs and feet. The man is wearing a black T-shirt with a green pattern, grey shorts and green sneakers. The man stands in the center of the screen and starts to juggle a ball. First, the man kicks the ball up with his left knee, then jumps and alternates his legs over the ball. Then, the man kicks the ball up with his foot again. While jumping, the man swings his hands to keep his balance. After repeating this twice, the man catches the ball with his hands. The background is a pure white wall.\nThen, the camera is focused on the front of the man. The young white man is wearing a black hat with a white pattern. The man lowers his head and uses his instep, toes, knees and other parts of his body to control the football, which bounces flexibly under his feet. The man also performs some difficult moves. 
The man first lifts the ball with his instep, then catches it in the air with his toes. Then, the man lifts the ball with his knees and quickly catches it with his instep.", "events": [ { "event": "The camera is focused on a man's legs and feet. The man is wearing a black T-shirt with a green pattern, grey shorts and green sneakers. The man stands in the center of the screen and starts to juggle a ball. First, the man kicks the ball up with his left knee, then jumps and alternates his legs over the ball. Then, the man kicks the ball up with his foot again. While jumping, the man swings his hands to keep his balance. After repeating this twice, the man catches the ball with his hands. The background is a pure white wall.", "visual_elements": [ { "content": "The camera is focused on a man's legs and feet.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a black T-shirt with a green pattern, gray shorts and green sneakers.", "type": "attribute", "weight": 3 }, { "content": "The man stands in the center of the screen and starts to kick the ball.", "type": "action", "weight": 3 }, { "content": "First, the man kicks the ball up with his left knee, then jumps and alternately uses his legs to go over the ball.", "type": "action", "weight": 2 }, { "content": "Then, the man kicks the ball up with his foot.", "type": "action", "weight": 2 }, { "content": "While jumping, the man swings his hands to keep his balance.", "type": "action", "weight": 1 }, { "content": "After repeating this twice, the man catches the ball with his hands.", "type": "action", "weight": 1 }, { "content": "The background is a pure white wall.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera is focused on the front of the man. The young white man is wearing a black hat with a white pattern. The man lowers his head and uses his instep, toes, knees and other parts of his body to control the football, which bounces flexibly under his feet. The man also performs some difficult moves. The man first lifts the ball with his instep, then catches it in the air with his toes. Then, the man lifts the ball with his knees and quickly catches it with his instep.", "visual_elements": [ { "content": "Then, the camera is focused on the front of the man.", "type": "camera", "weight": 3 }, { "content": "The young white man is wearing a black hat with a white pattern.", "type": "attribute", "weight": 2 }, { "content": "The man lowers his head and controls the football with different parts of his foot, toes, knees, etc.", "type": "action", "weight": 3 }, { "content": "The football bounces flexibly under the man's feet.", "type": "action", "weight": 2 }, { "content": "The man also does some difficult moves.", "type": "attribute", "weight": 3 }, { "content": "The man first lifts the ball with the instep, then catches it with his toes in the air.", "type": "action", "weight": 2 }, { "content": "Then, the man lifts the ball with his knees and quickly catches it with the instep.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0785", "video_path": "VIDGEN-1M/M2gKgIKGwLI-Scene-0073.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 9.71, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a forest path covered with gravel and dirt. On the left side of the screen, there is a young white man wearing a green top and black pants on the forest path. 
The man wears a black helmet and rides a mountain bike. The camera stays behind the man and follows him forward. There are dense woods and green bushes on both sides of the path.\nAs the man reaches the downhill road at the end of the forest trail, the camera moves slightly to the left. On the right side of the screen, there is a warning sign and a wooden guardrail at the end of the forest trail. At the same time, a clearing surrounded by a wooden fence gate appears in front of the road. The cyclist gradually slows down after going downhill. The camera also slows down.\nFinally, the camera turns slightly to the right to follow the man's movements. The man turns to the right of the frame and stops on the road in front of the fence. Then, the man turns around and looks back. A hand wearing a black and white glove appears from the bottom left of the frame. The man extends his right hand wearing a green glove to high-five the hand in the bottom left of the frame. There are some trees and bushes beside the road.", "events": [ { "event": "At the beginning of the video, the camera is focused on a forest path covered with gravel and dirt. On the left side of the screen, there is a young white man wearing a green top and black pants on the forest path. The man wears a black helmet and rides a mountain bike. The camera stays behind the man and follows him forward. There are dense woods and green bushes on both sides of the path.", "visual_elements": [ { "content": "The camera is focused on a forest trail covered with gravel and dirt.", "type": "camera", "weight": 3 }, { "content": "On the left side of the screen, there is a young white man wearing a green top and black pants on the forest trail.", "type": "attribute", "weight": 3 }, { "content": "The man is wearing a black helmet and riding a mountain bike.", "type": "attribute", "weight": 3 }, { "content": "The camera stays behind the man and follows him forward.", "type": "camera", "weight": 3 }, { "content": "On both sides of the trail are dense woods and green bushes.", "type": "attribute", "weight": 2 } ] }, { "event": "As the man reaches the downhill road at the end of the forest trail, the camera moves slightly to the left. On the right side of the screen, there is a warning sign and a wooden guardrail at the end of the forest trail. At the same time, a clearing surrounded by a wooden fence gate appears in front of the road. The cyclist gradually slows down after going downhill. The camera also slows down.", "visual_elements": [ { "content": "When the man rides to the downhill road at the end of the forest trail, the camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "On the right side of the screen, there are warning signs and wooden guardrails at the end of the forest trail.", "type": "scene", "weight": 2 }, { "content": "At the same time, a clearing surrounded by a wooden fence gate appears in front of the road.", "type": "scene", "weight": 1 }, { "content": "Then, the man gradually slows down after going downhill, and the camera slows down accordingly.", "type": "action", "weight": 1 } ] }, { "event": "Finally, the camera turns slightly to the right to follow the man's movements. The man turns to the right of the frame and stops on the road in front of the fence. Then, the man turns around and looks back. A hand wearing a black and white glove appears from the bottom left of the frame. The man extends his right hand wearing a green glove to high-five the hand in the bottom left of the frame. 
There are some trees and bushes beside the road.", "visual_elements": [ { "content": "Finally, the camera turns slightly to the right with the man's movements.", "type": "camera", "weight": 3 }, { "content": "The man turns to the right side of the screen and stops on the road in front of the fence.", "type": "action", "weight": 3 }, { "content": "Then, the man turns around and looks back.", "type": "action", "weight": 3 }, { "content": "A hand wearing a black and white glove appears from the lower left of the screen.", "type": "attribute", "weight": 2 }, { "content": "The man extends his right hand wearing a green glove and high-fives the hand in the lower left of the screen.", "type": "action", "weight": 3 }, { "content": "There are some trees and bushes beside the road.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0786", "video_path": "VIDGEN-1M/MS0-9pXQW98-Scene-0029.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 7.88, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a small stream lined with green grass and some shrubs. As the camera moves slightly to the left, the trunk of a large tree with thick, dark brown bark is visible.\nThe camera moves to the left again, and a section of barbed wire fence appears in the downstream direction of the stream. Behind the fence is a concrete-paved walkway with no pedestrians on it, and next to the walkway are the walls of some buildings with some creepers on them.", "events": [ { "event": "The video begins with the camera focusing on a small stream lined with green grass and some shrubs. As the camera moves slightly to the left, the trunk of a large tree with thick, dark brown bark is visible.", "visual_elements": [ { "content": "The camera focuses on a small stream.", "type": "camera", "weight": 3 }, { "content": "The stream is lined with green grass and some bushes.", "type": "scene", "weight": 2 }, { "content": "The camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The trunk of a large tree can be seen.", "type": "scene", "weight": 2 }, { "content": "The trunk is thick and the bark is dark brown.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves to the left again, and a section of barbed wire fence appears in the downstream direction of the stream. 
Behind the fence is a concrete-paved walkway with no pedestrians on it, and next to the walkway are the walls of some buildings with some creepers on them.", "visual_elements": [ { "content": "The camera moves left again.", "type": "camera", "weight": 3 }, { "content": "Downstream of the stream, a barbed wire fence appears.", "type": "scene", "weight": 2 }, { "content": "Behind the fence is a concrete walkway.", "type": "attribute", "weight": 2 }, { "content": "There are no pedestrians on the trail.", "type": "attribute", "weight": 1 }, { "content": "Next to the trail are the walls of some buildings.", "type": "attribute", "weight": 2 }, { "content": "There are some creepers on the walls.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0787", "video_path": "VIDGEN-1M/Mk7WZ7jcAtE-Scene-0022.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 12.18, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a poodle lying in the back seat of a car. The dog's fur is beige, and a blue scarf with white stars is tied around its neck. The dog's head is leaning on the seat, its eyes are slightly closed, and its body is curled up. The background is the black seat in the back seat of the car, and the surface of the seat has a fine texture. On the left side of the picture, next to the dog's head is an orange ball.\nThe camera then moves down slightly and light is shining on the back seat on the left side of the frame.\nThen, the camera moves to the right, and the back half of the dog's body can be seen, with its tail curled up beside its body. Behind the dog are two cardboard boxes, the one on the right side of the screen is green, and the other is black, with some words and patterns on the boxes. The dog's body is pressed against the boxes. The dog's head disappears to the left side of the screen.\nFinally, the camera moves to the left. The dog's body shakes slightly, and its eyes are half open. The orange ball in the background is fully visible.", "events": [ { "event": "At the beginning of the video, the camera is focused on a poodle lying in the back seat of a car. The dog's fur is beige, and a blue scarf with white stars is tied around its neck. The dog's head is leaning on the seat, its eyes are slightly closed, and its body is curled up. The background is the black seat in the back seat of the car, and the surface of the seat has a fine texture. 
On the left side of the picture, next to the dog's head is an orange ball.", "visual_elements": [ { "content": "The camera is focused on a poodle lying in the back seat of a car.", "type": "camera", "weight": 3 }, { "content": "The dog's fur is beige.", "type": "attribute", "weight": 3 }, { "content": "A blue scarf is tied around the dog's neck.", "type": "attribute", "weight": 2 }, { "content": "The scarf has white stars on it.", "type": "attribute", "weight": 1 }, { "content": "The dog's head is resting on the seat, eyes slightly closed, and body curled up.", "type": "action", "weight": 3 }, { "content": "The background is a black seat in the back seat of the car.", "type": "scene", "weight": 3 }, { "content": "The surface of the seat has a fine texture.", "type": "scene", "weight": 1 }, { "content": "On the left side of the screen, next to the dog's head is an orange ball.", "type": "scene", "weight": 1 } ] }, { "event": "The camera then moves down slightly and light is shining on the back seat on the left side of the frame.", "visual_elements": [ { "content": "Then, the camera moves down slightly.", "type": "camera", "weight": 3 }, { "content": "There is light on the back seat on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves to the right, and the back half of the dog's body can be seen, with its tail curled up beside its body. Behind the dog are two cardboard boxes, the one on the right side of the screen is green, and the other is black, with some words and patterns on the boxes. The dog's body is pressed against the boxes. The dog's head disappears to the left side of the screen.", "visual_elements": [ { "content": "Then, the camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The back half of the dog's body enters the screen.", "type": "attribute", "weight": 3 }, { "content": "The dog's tail is curled up next to its body.", "type": "action", "weight": 2 }, { "content": "There are two cardboard boxes behind the dog.", "type": "scene", "weight": 3 }, { "content": "The cardboard box on the right side of the picture is green, and the other cardboard box is black.", "type": "scene", "weight": 2 }, { "content": "There are some words and patterns on the cardboard box.", "type": "scene", "weight": 1 }, { "content": "The dog's body is close to the black cardboard box.", "type": "action", "weight": 2 }, { "content": "The dog's head disappears to the left side of the picture.", "type": "attribute", "weight": 1 } ] }, { "event": "Finally, the camera moves to the left. The dog's body shakes slightly, and its eyes are half open. The orange ball in the background is fully visible.", "visual_elements": [ { "content": "Finally, the camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The dog's body shakes slightly, and his eyes are half open.", "type": "action", "weight": 2 }, { "content": "The orange ball in the background is fully revealed.", "type": "attribute", "weight": 1 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0788", "video_path": "VIDGEN-1M/Mp1ndGPOTB8-Scene-0061.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 12.28, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is aimed at the area above the chest of a baseball pitcher, who is facing away from the camera and preparing to pitch. 
He is wearing a gray jersey and a green baseball cap with the number 39 on the jersey. The pitcher raises his right arm high and the ball flies out of his hand. The camera moves upward along the direction of the ball. In the picture, a catcher wearing green protective gear is squatting behind the home plate, and behind the catcher is an umpire wearing black protective gear and gray trousers. The catcher's left glove is open to catch the ball, but the batter standing next to him in a white jersey swings and hits the ball, which flies to the right side of the picture.\nThe camera switches to the side of the batter to capture the batter's movements and the movement of the ball after being hit. After hitting the ball, the batter runs forward, and the ball jumps forward on the field, and the camera moves with the ball.\nThe screen switches to a distant view again. The pitcher wearing a gray jersey and a green baseball cap raises his right knee and throws the ball quickly. On the opposite side, the catcher wearing green protective gear stands to the right of another batter wearing a white jersey and a blue helmet. The batter swings the bat and hits the ball. The playing field is composed of a large area of green grass and many red areas. There are many spectators sitting in the audience seats in the background. In front of the audience seats are billboards of various brands and website information.", "events": [ { "event": "At the beginning of the video, the camera is aimed at the area above the chest of a baseball pitcher, who is facing away from the camera and preparing to pitch. He is wearing a gray jersey and a green baseball cap with the number 39 on the jersey. The pitcher raises his right arm high and the ball flies out of his hand. The camera moves upward along the direction of the ball. In the picture, a catcher wearing green protective gear is squatting behind the home plate, and behind the catcher is an umpire wearing black protective gear and gray trousers. 
The catcher's left glove is open to catch the ball, but the batter standing next to him in a white jersey swings and hits the ball, which flies to the right side of the picture.", "visual_elements": [ { "content": "The camera is aimed at the area above the chest of a baseball pitcher.", "type": "camera", "weight": 3 }, { "content": "The pitcher is facing away from the camera and preparing to pitch.", "type": "action", "weight": 3 }, { "content": "The pitcher is wearing a gray jersey and a green baseball cap.", "type": "attribute", "weight": 2 }, { "content": "The number 39 is on the jersey.", "type": "attribute", "weight": 2 }, { "content": "The pitcher's right arm is raised high.", "type": "action", "weight": 3 }, { "content": "The ball flies out of his hand.", "type": "action", "weight": 2 }, { "content": "The camera moves up along the direction of the ball's movement.", "type": "camera", "weight": 3 }, { "content": "In the picture, a catcher wearing green protective gear is squatting behind the home plate.", "type": "attribute", "weight": 3 }, { "content": "Behind the catcher, an umpire wearing black protective gear and gray trousers is half-crouching.", "type": "attribute", "weight": 3 }, { "content": "The catcher's left glove is open, ready to catch the ball.", "type": "action", "weight": 3 }, { "content": "But the batter standing next to him wearing a white jersey swings and hits the ball.", "type": "action", "weight": 3 }, { "content": "The ball flies to the right side of the picture.", "type": "action", "weight": 2 } ] }, { "event": "The camera switches to the side of the batter to capture the batter's movements and the movement of the ball after being hit. After hitting the ball, the batter runs forward, and the ball jumps forward on the field, and the camera moves with the ball.", "visual_elements": [ { "content": "The camera switches to the side of the batter.", "type": "camera", "weight": 3 }, { "content": "The camera captures the batter's movements and the movement of the ball after being hit.", "type": "camera", "weight": 3 }, { "content": "After hitting the ball, the batter runs forward.", "type": "action", "weight": 1 }, { "content": "The ball jumps forward on the field.", "type": "action", "weight": 3 }, { "content": "The camera moves along with the ball.", "type": "camera", "weight": 3 } ] }, { "event": "The screen switches to a distant view again. The pitcher wearing a gray jersey and a green baseball cap raises his right knee and throws the ball quickly. On the opposite side, the catcher wearing green protective gear stands to the right of another batter wearing a white jersey and a blue helmet. The batter swings the bat and hits the ball. The playing field is composed of a large area of green grass and many red areas. There are many spectators sitting in the audience seats in the background. 
In front of the audience seats are billboards of various brands and website information.", "visual_elements": [ { "content": "The picture switches to a long shot again.", "type": "camera", "weight": 3 }, { "content": "The pitcher wearing a gray jersey and a green baseball cap raises his right knee high and throws the ball quickly.", "type": "action", "weight": 3 }, { "content": "To the right of the catcher wearing green protective gear stands another batter wearing a white jersey and a blue helmet.", "type": "attribute", "weight": 3 }, { "content": "The batter swings the bat and hits the ball.", "type": "action", "weight": 3 }, { "content": "The playing field consists of large green grass and many red areas.", "type": "scene", "weight": 2 }, { "content": "There are many spectators sitting in the audience seats in the background.", "type": "scene", "weight": 2 }, { "content": "In front of the audience seats are billboards and website information of various brands.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 24 }, { "index": "TUNA_0789", "video_path": "VIDGEN-1M/NQBObmairzE-Scene-0027.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 6.24, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on the pitcher and batter at the baseball field. The pitcher is wearing a white jersey and a black baseball cap with the words \"FERNANDEZ 16\" printed on the back of the jersey. The pitcher is also wearing black sleeves and orange gloves. The batter is wearing a gray jersey and a black cap. The catcher next to him is wearing a white jersey and black protective gear. Behind the catcher is the umpire wearing a black top and gray bottoms and a protective helmet. The catcher's movements after catching the ball are shown in the picture. In the background are the audience in the stands and billboards of various brands around the field.\nThe camera switches to a long shot, and the pitcher raises his left knee to start the pitching action, throwing the ball to the catcher with his right hand. The batter prepares to hit the ball but fails, and the ball flies to the catcher who is squatting on the ground in black protective gear, with the umpire behind him. The batter then bends down to put the bat on the field, and the catcher and the umpire stand up.\nThe camera switches to a close-up shot of the pitcher wearing a white jersey below the chest and above the thigh, with an orange glove on his left hand and preparing to pitch with his right hand.", "events": [ { "event": "The video begins with the camera focusing on the pitcher and batter at the baseball field. The pitcher is wearing a white jersey and a black baseball cap with the words \"FERNANDEZ 16\" printed on the back of the jersey. The pitcher is also wearing black sleeves and orange gloves. The batter is wearing a gray jersey and a black cap. The catcher next to him is wearing a white jersey and black protective gear. Behind the catcher is the umpire wearing a black top and gray bottoms and a protective helmet. The catcher's movements after catching the ball are shown in the picture. 
In the background are the audience in the stands and billboards of various brands around the field.", "visual_elements": [ { "content": "The camera is focused on the pitcher and batter at the baseball field.", "type": "camera", "weight": 3 }, { "content": "The pitcher is wearing a white jersey and a black baseball cap.", "type": "attribute", "weight": 3 }, { "content": "The words \"FERNANDEZ 16\" are printed on the back of the jersey.", "type": "attribute", "weight": 2 }, { "content": "The pitcher is also wearing black sleeves and orange gloves.", "type": "attribute", "weight": 2 }, { "content": "The batter is wearing a gray jersey and a black cap.", "type": "attribute", "weight": 3 }, { "content": "The catcher next to him is wearing a white jersey and black protective gear.", "type": "attribute", "weight": 3 }, { "content": "Behind the catcher is the umpire wearing a black top and gray bottoms and a protective helmet.", "type": "attribute", "weight": 2 }, { "content": "The picture shows the catcher's movements after catching the ball.", "type": "attribute", "weight": 2 }, { "content": "In the background, there are spectators in the stands and billboards of various brands around the field.", "type": "scene", "weight": 2 } ] }, { "event": "The camera switches to a long shot, and the pitcher raises his left knee to start the pitching action, throwing the ball to the catcher with his right hand. The batter prepares to hit the ball but fails, and the ball flies to the catcher who is squatting on the ground in black protective gear, with the umpire behind him. The batter then bends down to put the bat on the field, and the catcher and the umpire stand up.", "visual_elements": [ { "content": "The camera switches to a long shot.", "type": "camera", "weight": 3 }, { "content": "The pitcher raises his left knee to start pitching.", "type": "action", "weight": 3 }, { "content": "The pitcher throws the ball to the catcher with his right hand.", "type": "action", "weight": 3 }, { "content": "The batter prepares to hit but fails.", "type": "action", "weight": 3 }, { "content": "The ball flies to the catcher who is squatting on the ground in black protective gear.", "type": "action", "weight": 3 }, { "content": "Behind the catcher is the umpire.", "type": "attribute", "weight": 2 }, { "content": "The batter bends down to put the bat on the field.", "type": "action", "weight": 2 }, { "content": "The catcher and the umpire stand up.", "type": "action", "weight": 1 } ] }, { "event": "The camera switches to a close-up shot of the pitcher wearing a white jersey below the chest and above the thigh, with an orange glove on his left hand and preparing to pitch with his right hand.", "visual_elements": [ { "content": "The camera switches to a close shot.", "type": "camera", "weight": 3 }, { "content": "The camera captures the area below the chest and above the thigh of the pitcher wearing a white jersey.", "type": "camera", "weight": 3 }, { "content": "The pitcher wears an orange glove on his left hand.", "type": "attribute", "weight": 3 }, { "content": "The pitcher prepares to pitch with his right hand.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0790", "video_path": "VIDGEN-1M/NVedGeVPc30-Scene-0085.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 3.6, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is pointed at the counter of a bar. 
In the background, various bottles of wine are neatly arranged on the shelves. On the right side of the screen, a middle-aged white man wearing a dark T-shirt stands in front of the counter. On the left side of the screen, a man smiles and looks up into the screen. The man on the left has a beard, wears a black T-shirt, and has short, slightly messy hair. The man on the right clenches his right fist and smiles as he speaks to the man on the left.\nThe camera moves to the left, and the focus is on the man on the left side of the screen. The man on the right side of the screen turns to the camera and speaks, and gradually disappears to the right side of the screen. The man on the left side of the screen looks up and smiles while spreading the five fingers of his right hand upwards.", "events": [ { "event": "At the beginning of the video, the camera is pointed at the counter of a bar. In the background, various bottles of wine are neatly arranged on the shelves. On the right side of the screen, a middle-aged white man wearing a dark T-shirt stands in front of the counter. On the left side of the screen, a man smiles and looks up into the screen. The man on the left has a beard, wears a black T-shirt, and has short, slightly messy hair. The man on the right clenches his right fist and smiles as he speaks to the man on the left.", "visual_elements": [ { "content": "The camera is pointed at the counter of a bar.", "type": "camera", "weight": 3 }, { "content": "In the background, various bottles of wine are neatly arranged on the shelves.", "type": "scene", "weight": 2 }, { "content": "On the right side of the screen, a middle-aged white man in a dark T-shirt stands in front of the counter.", "type": "attribute", "weight": 3 }, { "content": "On the left side of the screen, a young white man enters the screen.", "type": "attribute", "weight": 3 }, { "content": "The man on the left has a beard, wears a black T-shirt, and has short, slightly messy hair.", "type": "attribute", "weight": 2 }, { "content": "The man on the right holds his right fist and speaks to the man on the left with a smile.", "type": "action", "weight": 3 }, { "content": "The man on the left smiles and raises his head.", "type": "action", "weight": 2 } ] }, { "event": "The camera moves to the left, and the focus is on the man on the left side of the screen. The man on the right side of the screen turns to the camera and speaks, and gradually disappears to the right side of the screen. The man on the left side of the screen looks up and smiles while spreading the five fingers of his right hand upwards.", "visual_elements": [ { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the man on the left side of the screen.", "type": "camera", "weight": 3 }, { "content": "The man on the right side of the screen turns to the camera and speaks, gradually disappearing to the right side of the screen.", "type": "action", "weight": 2 }, { "content": "The man on the left side of the screen smiles and raises his right hand with five fingers upward.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0791", "video_path": "VIDGEN-1M/NiC1x17G6jo-Scene-0110.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 6.41, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, there are several basketball players playing a game. 
The player in the yellow jersey, which has the number \"9\" printed on it, holds the ball. The No. 9 player in the yellow jersey dribbles the basketball and moves forward. There are defensive players in blue jerseys to his right and behind him. The number \"9\" is printed on the blue jersey of the defensive player behind him. The player in the yellow jersey tries to break through the defense and steps back to the left, but loses balance during dribbling, causing his body to fall forward, and the basketball slips out of his hand. The camera moves to the left with the player in the yellow jersey.\nThen the basketball is quickly grabbed by the No. 9 player in the blue jersey, who runs in a circle to the left, and the camera moves slightly to the right. There are several spectators and photographers sitting on the sidelines in the background. The words \"ONTARIO SOUTHERN CALIFORNIA\" are printed on the blue floor of the sidelines. The playing field is a light-colored reflective floor with blue lines drawn on it.", "events": [ { "event": "At the beginning of the video, there are several basketball players playing a game. The player in the yellow jersey, which has the number \"9\" printed on it, holds the ball. The No. 9 player in the yellow jersey dribbles the basketball and moves forward. There are defensive players in blue jerseys to his right and behind him. The number \"9\" is printed on the blue jersey of the defensive player behind him. The player in the yellow jersey tries to break through the defense and steps back to the left, but loses balance during dribbling, causing his body to fall forward, and the basketball slips out of his hand. The camera moves to the left with the player in the yellow jersey.", "visual_elements": [ { "content": "There are several basketball players playing in the picture.", "type": "attribute", "weight": 3 }, { "content": "The player in the yellow jersey holds the ball.", "type": "attribute", "weight": 2 }, { "content": "The number \"9\" is printed on the jersey.", "type": "attribute", "weight": 2 }, { "content": "The player in the yellow jersey, No. 9, dribbles the basketball and moves forward.", "type": "action", "weight": 3 }, { "content": "There are defensive players in blue jerseys to the right and behind the player in the yellow jersey.", "type": "attribute", "weight": 2 }, { "content": "The number \"9\" is printed on the blue jersey of the defensive player behind the player in the yellow jersey.", "type": "attribute", "weight": 2 }, { "content": "The player in the yellow jersey tries to break through the defense and steps back and left.", "type": "action", "weight": 3 }, { "content": "While dribbling, the player in the yellow jersey loses balance and falls forward.", "type": "action", "weight": 3 }, { "content": "The basketball slips out of the hands of the player in the yellow jersey.", "type": "action", "weight": 3 }, { "content": "The camera moves to the left with the player in the yellow jersey.", "type": "camera", "weight": 3 } ] }, { "event": "Then the basketball is quickly grabbed by the No. 9 player in the blue jersey, who runs in a circle to the left, and the camera moves slightly to the right. There are several spectators and photographers sitting on the sidelines in the background. The words \"ONTARIO SOUTHERN CALIFORNIA\" are printed on the blue floor of the sidelines. 
The playing field is a light-colored reflective floor with blue lines drawn on it.", "visual_elements": [ { "content": "Then the basketball is quickly grabbed by the player in the blue jersey.", "type": "action", "weight": 3 }, { "content": "The player in the blue jersey, No. 9, runs in a circle to the left.", "type": "action", "weight": 3 }, { "content": "The camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "There are several spectators and photographers sitting on the sidelines in the background.", "type": "scene", "weight": 2 }, { "content": "The words \"ONTARIO SOUTHERN CALIFORNIA\" are printed on the blue floor on the sidelines.", "type": "attribute", "weight": 2 }, { "content": "The playing field is a light-colored reflective floor.", "type": "attribute", "weight": 2 }, { "content": "There are blue lines painted on the floor.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0792", "video_path": "VIDGEN-1M/O-OnOIsCmrg-Scene-0131.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 15.16, "resolution": { "width": 1280, "height": 720 }, "caption": "In the upper left corner of the frame is the yellow “SAFARI” logo. The frame shows a vast field of tulips with many tourists, and in the distance are hills covered with vegetation. The camera moves to the right and the field is filled with red, yellow and orange tulips. The field is surrounded by green ropes with an English signboard on them.\nThe camera continues to move to the right and pans forward to make the tulips in the flower field clearer in the frame.", "events": [ { "event": "In the upper left corner of the frame is the yellow “SAFARI” logo. The frame shows a vast field of tulips with many tourists, and in the distance are hills covered with vegetation. The camera moves to the right and the field is filled with red, yellow and orange tulips. 
The field is surrounded by green ropes with an English signboard on them.", "visual_elements": [ { "content": "In the upper left corner of the image is the yellow logo of “SAFARI”.", "type": "scene", "weight": 1 }, { "content": "The picture shows a vast field of tulips.", "type": "scene", "weight": 3 }, { "content": "There are many tourists in the field.", "type": "attribute", "weight": 2 }, { "content": "In the distance are hills covered with vegetation.", "type": "attribute", "weight": 2 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "There are red, yellow and orange tulips in the field.", "type": "scene", "weight": 2 }, { "content": "The field is surrounded by green ropes", "type": "attribute", "weight": 2 }, { "content": "There is a sign in English on the rope", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move to the right, and the camera pans forward to make the tulips in the field of flowers clearer in the frame.", "visual_elements": [ { "content": "The camera continues to move to the right.", "type": "camera", "weight": 3 }, { "content": "The camera pans forward so that the tulips in the field are clearer in the frame.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0793", "video_path": "VIDGEN-1M/OBW8x-JdLaM-Scene-0150.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 7.26, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on an indoor equestrian arena with brown dirt on the ground. The camera keeps a long shot and moves quickly to the right. On the left side of the screen, a person is riding a brown horse. The person and the horse run to the right side of the screen. In the center of the arena, there are several blue barrels with purple bands and some logos. In the background, there is a green fence around the arena. There are some billboards hanging on the fence. There are many bright lights installed on the ceiling above the arena.\nThe camera stabilizes as the rider approaches the blue barrel in the center of the frame. The rider is wearing a yellow top, blue jeans, and a gray helmet. The horse circles around the blue barrel and then begins to run toward the camera.\nFinally, the camera moves slightly to the left. The horse runs to a blue barrel on the left side of the frame, close to the camera, and circles around it. On the horse is a young woman with a low silver ponytail. The woman's legs quickly clamp around the horse's back.", "events": [ { "event": "The video begins with the camera focused on an indoor equestrian arena with brown dirt on the ground. The camera keeps a long shot and moves quickly to the right. On the left side of the screen, a person is riding a brown horse. The person and the horse run to the right side of the screen. In the center of the arena, there are several blue barrels with purple bands and some logos. In the background, there is a green fence around the arena. There are some billboards hanging on the fence. 
There are many bright lights installed on the ceiling above the arena.", "visual_elements": [ { "content": "The camera is focused on an indoor equestrian arena with brown dirt on the ground.", "type": "camera", "weight": 3 }, { "content": "The camera keeps a long shot and moves quickly to the right.", "type": "camera", "weight": 3 }, { "content": "There is a person riding a brown horse on the left side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The person and the horse run to the right side of the screen.", "type": "action", "weight": 3 }, { "content": "There are several blue barrels with purple bands and some signs in the center of the arena.", "type": "attribute", "weight": 2 }, { "content": "There are green fences around the arena.", "type": "attribute", "weight": 2 }, { "content": "There are some billboards hanging on the fence.", "type": "attribute", "weight": 2 }, { "content": "There are many bright lights installed on the ceiling above the arena.", "type": "scene", "weight": 1 } ] }, { "event": "The camera stabilizes as the rider approaches the blue barrel in the center of the frame. The rider is wearing a yellow top, blue jeans, and a gray helmet. The horse circles around the blue barrel and then begins to run toward the camera.", "visual_elements": [ { "content": "When the rider approaches the blue barrel in the center of the screen, the camera becomes stable.", "type": "camera", "weight": 3 }, { "content": "The rider wears a yellow top and blue jeans and a gray helmet.", "type": "attribute", "weight": 2 }, { "content": "The horse circles around the blue barrel and starts running towards the camera.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera moves slightly to the left. The horse runs to a blue barrel on the left side of the frame, close to the camera, and circles around it. On the horse is a young woman with a low silver ponytail. The woman's legs quickly clamp around the horse's back.", "visual_elements": [ { "content": "Finally, the camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The horse runs to a blue barrel on the left side of the screen close to the camera and circles around it.", "type": "action", "weight": 3 }, { "content": "On the horse is a young lady with a silver low ponytail.", "type": "attribute", "weight": 1 }, { "content": "The woman quickly clamps her legs on the horse's back.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0794", "video_path": "VIDGEN-1M/OPgYAdBIPio-Scene-0108.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 6.87, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera is focused on an outdoor open space. There is a young man wearing a black helmet, a white long-sleeved shirt and black pants. The man is riding a black BMX bike. The man is riding to the left of the screen. The weather is clear and sunny. There are some tall buildings and bare trees in the background. The ground of the venue is paved with gray and red bricks. There are multiple skateboard ramps and benches in the venue.\nThen, the camera moves to the upper left along with the man's movements. The man rides his bicycle close to a U-shaped ramp paved with red bricks. Then the man accelerates towards the ramp, completes a rotation in the air, forming a perfect arc. 
Finally, the man rides his bicycle steadily back to the ground.\nAt the end of the video, the camera moves to the lower right as the man goes downhill. The man rides sideways to the camera and enters the foreground of the camera. The man's white long sleeves have red stripes. The man rides to another U-shaped slope on the right side of the screen.", "events": [ { "event": "The camera is focused on an outdoor open space. There is a young man wearing a black helmet, a white long-sleeved shirt and black pants. The man is riding a black BMX bike. The man is riding to the left of the screen. The weather is clear and sunny. There are some tall buildings and bare trees in the background. The ground of the venue is paved with gray and red bricks. There are multiple skateboard ramps and benches in the venue.", "visual_elements": [ { "content": "The camera is focused on an outdoor open space.", "type": "camera", "weight": 3 }, { "content": "There is a young man wearing a black helmet, a white long-sleeved shirt and black pants in the picture.", "type": "attribute", "weight": 3 }, { "content": "The man rides a black BMX bike to the left of the picture.", "type": "attribute", "weight": 3 }, { "content": "The weather is clear and sunny.", "type": "scene", "weight": 1 }, { "content": "There are some tall buildings and bare trees in the background.", "type": "scene", "weight": 2 }, { "content": "The ground of the venue is paved with gray and red bricks.", "type": "attribute", "weight": 2 }, { "content": "There are several skateboard ramps and benches in the venue.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera moves to the upper left along with the man's movements. The man rides his bicycle close to a U-shaped ramp paved with red bricks. Then the man accelerates towards the ramp, completes a rotation in the air, forming a perfect arc. Finally, the man rides his bicycle steadily back to the ground.", "visual_elements": [ { "content": "Then, the camera moves to the upper left with the man's movements.", "type": "camera", "weight": 3 }, { "content": "The man rides his bike close to a U-shaped ramp paved with red bricks.", "type": "action", "weight": 3 }, { "content": "Then the man accelerates and rides towards the ramp, completing a rotation in the air, forming a perfect arc.", "type": "action", "weight": 3 }, { "content": "Finally, the man rides the bike steadily back to the ground.", "type": "action", "weight": 2 } ] }, { "event": "At the end of the video, the camera moves to the lower right as the man goes downhill. The man rides sideways to the camera and enters the foreground of the camera. The man's white long sleeves have red stripes. 
The man rides to another U-shaped slope on the right side of the screen.", "visual_elements": [ { "content": "At the end of the video, the camera moves to the lower right as the man goes downhill.", "type": "camera", "weight": 3 }, { "content": "The man rides sideways to the camera and arrives in the foreground of the camera.", "type": "action", "weight": 3 }, { "content": "There are red stripes on the man's white long-sleeved shirt.", "type": "attribute", "weight": 1 }, { "content": "The man rides to another U-shaped ramp on the right side of the picture.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0795", "video_path": "VIDGEN-1M/OgEEP-VjThA-Scene-0059.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 7.56, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a young white female skater wearing a black top and purple leggings. The woman is wearing white skates and her hair is tied into a neat bun. The woman performs a spin on the ice, crossing her arms over her chest and then unfolding them. In the background, a female skater in a blue skirt enters the frame from the right and skates to the left. In the background is a wide ice rink with a smooth surface that reflects the brightness of the lights.\nThe camera follows the black skater's movements. After the black skater completes the rotation, she begins to skate to the left of the screen, swinging her arms naturally. Another young black female skater wearing a gold top and black pants enters from the left side of the screen and skates to the right side of the screen. The word \"USA\" is printed on the back of the black skater's top. The surrounding billboards are reflected on the ice surface.\nFinally, the black skater turns to face the camera and skates to the right side of the screen. Another Asian female skater in a blue skirt enters from the lower left of the screen and skates to the back right of the screen. The colorful \"RANCE 2017\" is printed on the ground in the lower left of the screen.", "events": [ { "event": "The video begins with the camera focusing on a young white female skater wearing a black top and purple leggings. The woman is wearing white skates and her hair is tied into a neat bun. The woman performs a spin on the ice, crossing her arms over her chest and then unfolding them. In the background, a female skater in a blue skirt enters the frame from the right and skates to the left. In the background is a wide ice rink with a smooth surface that reflects the brightness of the lights.", "visual_elements": [ { "content": "The camera focuses on a young Caucasian female skater wearing a black top and purple leggings.", "type": "camera", "weight": 3 }, { "content": "The woman wears white skates and has her hair in a neat bun.", "type": "attribute", "weight": 2 }, { "content": "The woman performs a spin on the ice, crossing her arms over her chest and then spreading them.", "type": "action", "weight": 3 }, { "content": "In the background, a female skater in a blue skirt enters the frame from the right and skates to the left.", "type": "scene", "weight": 2 }, { "content": "The background is a wide ice rink.", "type": "scene", "weight": 3 }, { "content": "The ice is smooth and reflects the brightness of the light.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera follows the black skater's movements. 
After the black skater completes the rotation, she begins to skate to the left of the screen, swinging her arms naturally. Another young black female skater wearing a gold top and black pants enters from the left side of the screen and skates to the right side of the screen. The word \"USA\" is printed on the back of the black skater's top. The surrounding billboards are reflected on the ice surface.", "visual_elements": [ { "content": "The camera follows the black skater's movements.", "type": "camera", "weight": 3 }, { "content": "After the black skater completes the rotation, she starts to skate to the left side of the screen.", "type": "action", "weight": 3 }, { "content": "The black skater swings her arms naturally.", "type": "action", "weight": 2 }, { "content": "The word \"USA\" is printed on the back of the black skater's shirt.", "type": "attribute", "weight": 1 }, { "content": "Then, a young black female skater wearing a gold top and black pants appears in the screen.", "type": "attribute", "weight": 2 }, { "content": "The black female skater enters from the left side of the screen and skates to the right side of the screen.", "type": "action", "weight": 2 }, { "content": "The surrounding billboards are reflected on the ice surface.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the black skater turns to face the camera and skates to the right side of the screen. Another Asian female skater in a blue skirt enters from the lower left of the screen and skates to the back right of the screen. The colorful \"RANCE 2017\" is printed on the ground in the lower left of the screen.", "visual_elements": [ { "content": "Finally, the black skater turns to face the camera and skates to the right side of the screen.", "type": "action", "weight": 3 }, { "content": "Another Asian female skater in a blue skirt enters from the lower left of the screen and skates to the right back of the screen.", "type": "action", "weight": 1 }, { "content": "The colorful \"RANCE 2017\" is printed on the ground in the lower left of the screen.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0796", "video_path": "VIDGEN-1M/X53Mmz75ReI-Scene-0074.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 10.39, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is pointed at an indoor basketball court. A young white man is standing on the basketball court. The man is wearing a white T-shirt with prints and black trousers, and white sneakers on his feet. The man has pink dreadlocks and a silver necklace around his neck. The man is holding an orange basketball in his hand. The basketball court floor is light-colored with red and black lines. There are multiple basketball hoops and baskets around the court. There are silver benches on the left side of the screen, and a volleyball net and red volleyball posts on the right side of the screen. There are multiple hanging lamps on the top of the court, providing ample lighting.\nThe camera keeps shaking left and right. The man stands in place and dribbles the ball, performing some basic basketball moves.\nThen, the camera pans as the man moves to the left. The man dribbles the ball and moves to the left of the screen. When he reaches the bottom of the basket, the man spins in the air and passes the basketball under his legs, then jumps up and shoots. 
There are some signs on the white walls of the basketball court. There are also multiple emergency exits and red indicator lights installed on the walls.", "events": [ { "event": "At the beginning of the video, the camera is pointed at an indoor basketball court. A young white man is standing on the basketball court. The man is wearing a white T-shirt with prints and black trousers, and white sneakers on his feet. The man has pink dreadlocks and a silver necklace around his neck. The man is holding an orange basketball in his hand. The basketball court floor is light-colored with red and black lines. There are multiple basketball hoops and baskets around the court. There are silver benches on the left side of the screen, and a volleyball net and red volleyball posts on the right side of the screen. There are multiple hanging lamps on the top of the court, providing ample lighting.", "visual_elements": [ { "content": "The camera is pointed at an indoor basketball court.", "type": "camera", "weight": 3 }, { "content": "A young white man is standing on the basketball court.", "type": "attribute", "weight": 3 }, { "content": "The man is wearing a white T-shirt with prints, black trousers and white sneakers.", "type": "attribute", "weight": 2 }, { "content": "The man has pink dreadlocks and a silver necklace around his neck.", "type": "attribute", "weight": 1 }, { "content": "The man is holding an orange basketball.", "type": "action", "weight": 3 }, { "content": "The basketball court has a light-colored floor with red and black lines.", "type": "attribute", "weight": 2 }, { "content": "There are multiple basketball hoops and baskets around the court.", "type": "attribute", "weight": 2 }, { "content": "There are silver benches on the left side of the picture, and a volleyball net and red volleyball posts on the right side of the picture.", "type": "attribute", "weight": 1 }, { "content": "There are multiple hanging lamps on the top of the court, providing ample lighting.", "type": "scene", "weight": 2 } ] }, { "event": "The camera keeps shaking left and right. The man stands in place and dribbles the ball, performing some basic basketball moves.", "visual_elements": [ { "content": "The camera keeps shaking left and right.", "type": "camera", "weight": 3 }, { "content": "The man stands still and dribbles the ball, performing some basic basketball moves.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera pans as the man moves to the left. The man dribbles the ball and moves to the left of the screen. When he reaches the bottom of the basket, the man spins in the air and passes the basketball under his legs, then jumps up and shoots. There are some signs on the white walls of the basketball court. 
There are also multiple emergency exits and red indicator lights installed on the walls.", "visual_elements": [ { "content": "Then, the camera shakes and moves to the left with the man.", "type": "camera", "weight": 3 }, { "content": "The man dribbles the ball and moves to the left of the screen.", "type": "action", "weight": 3 }, { "content": "When the man reaches the bottom of the basket, he spins in the air and passes the basketball under his legs, then jumps up and shoots.", "type": "action", "weight": 3 }, { "content": "There are some signs on the white walls of the basketball court.", "type": "scene", "weight": 1 }, { "content": "There are also multiple emergency exits and red indicator lights installed on the walls.", "type": "scene", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0797", "video_path": "VIDGEN-1M/XCzpF12AvzY-Scene-0056.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 10.44, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a metal baking pan. Some vegetable leaves and a knife with a black handle are scattered on the wooden table around the baking pan. There is also a mobile phone with a white frame beside the knife.\nThere are two orange-red fish placed on the baking pan. The surfaces of the fish are sprinkled with black pepper and salt, and their bellies are stuffed with green vegetables and yellow lemon slices. There is also a piece of green lemon stuffed in the mouths of the fish.\nThen, the camera zooms in and moves from left to right, focusing on the head of the fish on the right side, showing the fish mouth stuffed with lemon pieces. The fish's eyes are wide open. Subsequently, the camera moves along the body of this fish to the left and forward.\nFinally, the camera tilts to the left. The bodies and tails of the two fish can be seen in the picture, and the white silicone paper on the baking pan reflects the yellow light.", "events": [ { "event": "At the beginning of the video, the camera is focused on a metal baking pan. Some vegetable leaves and a knife with a black handle are scattered on the wooden table around the baking pan. There is also a mobile phone with a white frame beside the knife.", "visual_elements": [ { "content": "The camera is focused on a metal baking pan.", "type": "camera", "weight": 3 }, { "content": "Some vegetable leaves and a knife with a black handle are scattered on the wooden table around the baking pan.", "type": "scene", "weight": 2 }, { "content": "There is also a mobile phone with a white frame beside the knife.", "type": "scene", "weight": 1 } ] }, { "event": "There are two orange-red fish placed on the baking pan. The surfaces of the fish are sprinkled with black pepper and salt, and their bellies are stuffed with green vegetables and yellow lemon slices. 
There is also a piece of green lemon stuffed in the mouths of the fish.", "visual_elements": [ { "content": "There are two orange-red fish placed on the baking pan.", "type": "scene", "weight": 3 }, { "content": "The surfaces of the fish are sprinkled with black pepper and salt.", "type": "attribute", "weight": 2 }, { "content": "The fish's bellies are stuffed with green vegetables and yellow lemon slices.", "type": "attribute", "weight": 2 }, { "content": "There is also a piece of green lemon stuffed in the mouths of the fish.", "type": "attribute", "weight": 3 } ] }, { "event": "Then, the camera zooms in and moves from left to right, focusing on the head of the fish on the right side, showing the fish mouth stuffed with lemon pieces. The fish's eyes are wide open. Subsequently, the camera moves along the body of this fish to the left and forward.", "visual_elements": [ { "content": "The camera zooms in and moves from left to right.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the head of the fish on the right side.", "type": "camera", "weight": 3 }, { "content": "The picture shows the fish mouth stuffed with lemon pieces.", "type": "scene", "weight": 2 }, { "content": "The fish's eyes are wide open.", "type": "attribute", "weight": 1 }, { "content": "The camera moves along the body of this fish to the left and forward.", "type": "camera", "weight": 3 } ] }, { "event": "Finally, the camera tilts to the left. The bodies and tails of the two fish can be seen in the picture, and the white silicone paper on the baking pan reflects the yellow light.", "visual_elements": [ { "content": "The camera tilts to the left.", "type": "camera", "weight": 3 }, { "content": "The bodies and tails of the two fish can be seen in the picture.", "type": "scene", "weight": 2 }, { "content": "The white silicone paper on the baking pan reflects the yellow light.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 15 }, { "index": "TUNA_0798", "video_path": "VIDGEN-1M/Y0BShYLDg2Y-Scene-0063.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 4.4, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, two middle-aged white male fighters are fighting fiercely in the octagonal cage. The fighter on the left side of the screen wears a red bracelet, has a tattoo on his right arm, and wears blue shorts with a white skull pattern. The fighter on the right side of the screen wears a blue bracelet, has short light brown hair, and wears black shorts with red letters. Both are shirtless and wear black boxing gloves. There is a barbed wire fence around the octagonal cage. There are spectators and photographers in the background, and some people in the audience are taking pictures with cameras. There are some advertising signs on the ground and billboards on the edge of the cage.\nThe camera moves forward and to the left. The right fighter first takes an offensive stance, leaning forward slightly, ready to punch. The left fighter prepares to defend. Then, the right fighter quickly punches, and the left fighter blocks it with his right hand. The right fighter continues to attack, kicking the left fighter's abdomen with his left leg, and the left fighter grabs the right fighter's leg.\nThe camera moves to the right. The left fighter grabs the right fighter's legs, forcing him to bend over and retreat to the right side of the screen. Finally, the right fighter breaks free. 
Then, the left fighter kicks the right fighter with his left foot. The right fighter retreats to the edge of the cage on the right side of the screen and blocks the attack with both hands. A male referee wearing a black short-sleeved shirt and long pants appears in the cage on the right side of the screen. The referee turns his back to the camera and closely monitors the situation of the game.", "events": [ { "event": "At the beginning of the video, two middle-aged white male fighters are fighting fiercely in the octagonal cage. The fighter on the left side of the screen wears a red bracelet, has a tattoo on his right arm, and wears blue shorts with a white skull pattern. The fighter on the right side of the screen wears a blue bracelet, has short light brown hair, and wears black shorts with red letters. Both are shirtless and wear black boxing gloves. There is a barbed wire fence around the octagonal cage. There are spectators and photographers in the background, and some people in the audience are taking pictures with cameras. There are some advertising signs on the ground and billboards on the edge of the cage.", "visual_elements": [ { "content": "Two middle-aged white male fighters are fighting fiercely in the octagonal cage.", "type": "action", "weight": 3 }, { "content": "The fighter on the left side of the picture is wearing a red bracelet, a tattoo on his right arm, and blue shorts with a white skull pattern.", "type": "attribute", "weight": 3 }, { "content": "The fighter on the right side of the picture is wearing a blue bracelet, has short light brown hair, and wears black shorts with red letters.", "type": "attribute", "weight": 3 }, { "content": "Both fighters are shirtless and wearing black boxing gloves.", "type": "attribute", "weight": 2 }, { "content": "There is a barbed wire fence around the octagonal cage.", "type": "scene", "weight": 2 }, { "content": "There are spectators and photographers in the background, and some people in the audience are taking pictures with cameras.", "type": "scene", "weight": 2 }, { "content": "There are some advertising signs on the ground and billboards on the edge of the cage.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves forward and to the left. The right fighter first takes an offensive stance, leaning forward slightly, ready to punch. The left fighter prepares to defend. Then, the right fighter quickly punches, and the left fighter blocks it with his right hand. The right fighter continues to attack, kicking the left fighter's abdomen with his left leg, and the left fighter grabs the right fighter's leg.", "visual_elements": [ { "content": "The camera moves forward and to the left.", "type": "camera", "weight": 3 }, { "content": "The right fighter first takes an offensive stance, leaning forward slightly, ready to punch.", "type": "action", "weight": 2 }, { "content": "The left fighter prepares to defend.", "type": "action", "weight": 2 }, { "content": "Then, the right fighter quickly punches, and the left fighter blocks it with his right hand.", "type": "action", "weight": 3 }, { "content": "The right fighter continues to attack, kicking the left fighter's abdomen with his left leg.", "type": "action", "weight": 3 }, { "content": "The left fighter grabs the right fighter's leg.", "type": "action", "weight": 3 } ] }, { "event": "The camera moves to the right. The left fighter grabs the right fighter's legs, forcing him to bend over and retreat to the right side of the screen. Finally, the right fighter breaks free. 
Then, the left fighter kicks the right fighter with his left foot. The right fighter retreats to the edge of the cage on the right side of the screen and blocks the attack with both hands. A male referee wearing a black short-sleeved shirt and long pants appears in the cage on the right side of the screen. The referee turns his back to the camera and closely monitors the situation of the game.", "visual_elements": [ { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The left fighter grabs the right fighter's legs, forcing him to bend over and retreat to the right side of the screen.", "type": "action", "weight": 3 }, { "content": "Finally, the right fighter breaks free.", "type": "action", "weight": 3 }, { "content": "Then, the left fighter kicks the right fighter with his left foot.", "type": "action", "weight": 2 }, { "content": "The right fighter retreats to the edge of the cage on the right side of the screen and blocks the attack with both hands.", "type": "action", "weight": 2 }, { "content": "A male referee wearing a black short-sleeved shirt and long pants appears in the cage on the right side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The referee turns his back to the camera and closely monitors the situation of the game.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0799", "video_path": "VIDGEN-1M/Y9l_-hXSh10-Scene-0040.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 8.61, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on several pieces of sausages that have been cut and placed on a white plate. The surfaces of the sausages are reddish-brown with a slight burn mark. There is a pile of squeezed tomato sauce placed on one side of the sausages. The tomato sauce is bright red. There is a black fork placed on the other side of the sausages.\nThe camera moves slowly from left to right to continue showing the cut sausages. There is a small pile of yellow mustard sauce beside the tomato sauce in the picture.", "events": [ { "event": "At the beginning of the video, the camera is focused on several pieces of sausages that have been cut and placed on a white plate. The surfaces of the sausages are reddish-brown with a slight burn mark. There is a pile of squeezed tomato sauce placed on one side of the sausages. The tomato sauce is bright red. There is a black fork placed on the other side of the sausages.", "visual_elements": [ { "content": "The camera is focused on several pieces of sausages that have been cut and placed on a white plate.", "type": "camera", "weight": 3 }, { "content": "The surfaces of the sausages are reddish-brown.", "type": "attribute", "weight": 2 }, { "content": "The surface of the sausages has a slight burn mark.", "type": "attribute", "weight": 2 }, { "content": "There is a pile of squeezed tomato sauce placed on one side of the sausages.", "type": "attribute", "weight": 3 }, { "content": "The tomato sauce is bright red.", "type": "attribute", "weight": 2 }, { "content": "There is a black fork placed on the other side of the sausages.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera moves slowly from left to right to continue showing the cut sausages. 
There is a small pile of yellow mustard sauce beside the tomato sauce in the picture.", "visual_elements": [ { "content": "The camera moves slowly from left to right to continue showing the cut sausages.", "type": "camera", "weight": 3 }, { "content": "There is a small pile of yellow mustard sauce beside the tomato sauce in the picture.", "type": "attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0800", "video_path": "VIDGEN-1M/YRpe-wR0iik-Scene-0051.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 21.35, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a young white man standing on a large rock. The camera keeps a close-up shot. The man is shirtless and wears a silver necklace around his neck. The man has short blond hair, a strong body, and obvious muscle lines. In the background is a flowing river. There are many rocks interspersed in the river on the right side of the picture. There are dense forests around.\nThe camera moves slightly backwards, shaking. The man is wearing white shorts with black patterns and white sneakers. He uses gestures and body language to express something while speaking. The man then turns his back to the camera, walks forward a few steps, and stands on the edge of the stone. Then, the man turns back to the camera again and walks back and forth on the stone. Finally, the man looks around and stands in the middle of the picture. The river water on the right side of the picture is clear. Some water flows over the stone, forming a small waterfall.\nThe camera moves to the right following the man's movements. The man does three backflips to the right of the screen, his body spinning in the air, and finally lands steadily on the rock. The sun shines on the rock, casting the man's figure.\nFinally, the camera pans toward the man. After the man stands firm, he high-fives a hand that appears on the left side of the screen. Then the man steps back while talking, his left hand pointing to the lower right side of the screen.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young white man standing on a large rock. The camera keeps a close-up shot. The man is shirtless and wears a silver necklace around his neck. The man has short blond hair, a strong body, and obvious muscle lines. In the background is a flowing river. There are many rocks interspersed in the river on the right side of the picture. There are dense forests around.", "visual_elements": [ { "content": "The camera is focused on a young white man standing on a large rock.", "type": "camera", "weight": 3 }, { "content": "The camera keeps a close-up shot.", "type": "camera", "weight": 2 }, { "content": "The man is shirtless and wears a silver necklace around his neck.", "type": "attribute", "weight": 3 }, { "content": "The man has short blond hair, a strong body, and obvious muscle lines.", "type": "attribute", "weight": 3 }, { "content": "The background is a flowing river.", "type": "scene", "weight": 3 }, { "content": "There are many rocks interspersed in the river on the right side of the picture.", "type": "scene", "weight": 2 }, { "content": "The river is surrounded by dense woods.", "type": "scene", "weight": 3 } ] }, { "event": "The camera moves slightly backwards, shaking. The man is wearing white shorts with black patterns and white sneakers. 
He uses gestures and body language to express something while speaking. The man then turns his back to the camera, walks forward a few steps, and stands on the edge of the stone. Then, the man turns back to the camera again and walks back and forth on the stone. Finally, the man looks around and stands in the middle of the picture. The river water on the right side of the picture is clear. Some water flows over the stone, forming a small waterfall.", "visual_elements": [ { "content": "The camera moves slightly backwards while shaking.", "type": "camera", "weight": 3 }, { "content": "The man is wearing white shorts with black patterns and white sneakers.", "type": "attribute", "weight": 2 }, { "content": "The man uses gestures and body language to express something while speaking.", "type": "action", "weight": 3 }, { "content": "The man then turns his back to the camera, walks forward a few steps, and stands on the edge of the stone.", "type": "action", "weight": 3 }, { "content": "Then, the man turns back to the camera again and walks back and forth on the stone.", "type": "action", "weight": 2 }, { "content": "Finally, the man looks around and stands in the middle of the picture.", "type": "action", "weight": 2 }, { "content": "The river water on the right side of the picture is clear.", "type": "scene", "weight": 2 }, { "content": "Some water flows over the stone, forming a small waterfall.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves to the right following the man's movements. The man does three backflips to the right of the screen, his body spinning in the air, and finally lands steadily on the rock. The sun shines on the rock, casting the man's figure.", "visual_elements": [ { "content": "The camera moves to the right following the man's movements.", "type": "camera", "weight": 3 }, { "content": "The man does three backflips to the right of the screen, his body spinning in the air.", "type": "action", "weight": 3 }, { "content": "The man finally lands steadily on the rock.", "type": "action", "weight": 1 }, { "content": "The sunlight shines on the rock, casting the man's figure.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera pans toward the man. After the man stands firm, he high-fives a hand that appears on the left side of the screen. Then the man steps back while talking, his left hand pointing to the lower right side of the screen.", "visual_elements": [ { "content": "Finally, the camera pans toward the man.", "type": "camera", "weight": 3 }, { "content": "The man stands firm and high-fives a hand that appears on the left side of the screen.", "type": "action", "weight": 2 }, { "content": "Then the man backs away while talking, his left hand pointing to the lower right side of the screen.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 22 }, { "index": "TUNA_0801", "video_path": "VIDGEN-1M/Za_NHKRbQ3s-Scene-0058.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 7.94, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a middle-aged white man. The man is wearing a black coat and a black and gray plaid shirt, standing in a factory workshop. In the background, there are some stacked wooden pallets on the left side of the screen, and there are some purple-packaged items in front of the pallets and on the right side of the screen. 
There are several workers in the factory workshop, most of them wearing purple work clothes, working busily. In the distance, there are neatly arranged silver workbenches.\nThen, the camera moves slightly to the left. The factory's machinery appears from the top of the screen. The man faces the camera, speaking and pointing at the machinery behind him. On the right side of the screen, a worker in a gray short-sleeved shirt pushes the purple-wrapped item to the left. Another worker in purple overalls raises the machine above and aims it at the purple-wrapped item.\nFinally, the camera moves slightly upwards and focuses on the man's face. The man looks up at the camera and speaks. The worker in purple overalls leans over the machine above and aims at the purple-packaged item.", "events": [ { "event": "At the beginning of the video, the camera is focused on a middle-aged white man. The man is wearing a black coat and a black and gray plaid shirt, standing in a factory workshop. In the background, there are some stacked wooden pallets on the left side of the screen, and there are some purple-packaged items in front of the pallets and on the right side of the screen. There are several workers in the factory workshop, most of them wearing purple work clothes, working busily. In the distance, there are neatly arranged silver workbenches.", "visual_elements": [ { "content": "The camera is pointed at a middle-aged white male.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a black coat and a black and gray plaid shirt.", "type": "attribute", "weight": 2 }, { "content": "The man is standing in a factory workshop.", "type": "scene", "weight": 3 }, { "content": "There are some stacked wooden pallets in the background on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "There are some purple-packaged items in front of the pallets and on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "Several workers are busy working in the factory workshop.", "type": "action", "weight": 2 }, { "content": "Most of the workers are wearing purple work clothes.", "type": "attribute", "weight": 1 }, { "content": "There are neatly arranged silver workbenches in the distance.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves slightly to the left. The factory's machinery appears from the top of the screen. The man faces the camera, speaking and pointing at the machinery behind him. On the right side of the screen, a worker in a gray short-sleeved shirt pushes the purple-wrapped item to the left. Another worker in purple overalls raises the machine above and aims it at the purple-wrapped item.", "visual_elements": [ { "content": "Then, the camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The factory's machinery and equipment appear from the top of the screen.", "type": "attribute", "weight": 2 }, { "content": "The man faces the camera, pointing to the machinery and equipment behind him while talking.", "type": "action", "weight": 3 }, { "content": "On the right side of the screen, a worker in a gray short-sleeved shirt pushes the purple-packaged items to the left side of the screen.", "type": "action", "weight": 2 }, { "content": "Another worker in a purple work suit picks up the machinery and equipment above and points it at the purple-packaged items.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera moves slightly upwards and focuses on the man's face. 
The man looks up at the camera and speaks. The worker in purple overalls leans over the machine above and aims at the purple-packaged item.", "visual_elements": [ { "content": "Finally, the camera moves slightly upward.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the man's face.", "type": "camera", "weight": 3 }, { "content": "The man looks up at the camera and speaks.", "type": "action", "weight": 3 }, { "content": "The worker in purple overalls leans over the machine above and aims at the purple-packaged item.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0802", "video_path": "VIDGEN-1M/ZunXRAgLyk0-Scene-0169.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 5.8, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an insect on the ground. The insect is black, small, with an elongated body and long legs. The background is soil and green stems of plants, and the light is bright.\nThe camera moves down to capture the insect walking in the dirt. Only the insect's head appears at the top of the frame.\nThe camera moves upward to capture the whole body of the insect, which jumps up and down flexibly.", "events": [ { "event": "At the beginning of the video, the camera focuses on an insect on the ground. The insect is black, small, with an elongated body and long legs. The background is soil and green stems of plants, and the light is bright.", "visual_elements": [ { "content": "The camera focuses on an insect on the ground.", "type": "camera", "weight": 3 }, { "content": "The insect's body is black.", "type": "attribute", "weight": 3 }, { "content": "The insect is small, with an elongated body and long legs.", "type": "attribute", "weight": 2 }, { "content": "The background is soil and green stems of plants.", "type": "scene", "weight": 3 }, { "content": "The light is bright.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves down to capture the insect walking in the dirt. Only the insect's head appears at the top of the frame.", "visual_elements": [ { "content": "The camera moves downward.", "type": "camera", "weight": 3 }, { "content": "The camera captures the insect walking in the soil.", "type": "action", "weight": 3 }, { "content": "Only the insect's head appears at the top of the frame.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves upward to capture the whole body of the insect, which jumps up and down flexibly.", "visual_elements": [ { "content": "The camera moves upward.", "type": "camera", "weight": 3 }, { "content": "The camera captures the insect's entire body.", "type": "camera", "weight": 2 }, { "content": "The insect jumps up and down flexibly.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0803", "video_path": "VIDGEN-1M/_Gt9A67Re8o-Scene-0344.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 15.02, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera tilting upwards to picture the top of a dense bamboo forest. 
The camera moves slowly downward and forward at first, then the downward movement speeds up to show a passageway with rocks and dead leaves, flanked by a dense bamboo forest.\nThe camera pushes forward and focuses on a narrow passageway.", "events": [ { "event": "The video begins with the camera tilting upwards to picture the top of a dense bamboo forest. The camera moves slowly downward and forward at first, then the downward movement speeds up to show a passageway with rocks and dead leaves, flanked by a dense bamboo forest.", "visual_elements": [ { "content": "The camera pans upward.", "type": "camera", "weight": 3 }, { "content": "The scene is the top of a dense bamboo forest.", "type": "scene", "weight": 3 }, { "content": "The camera moves slowly downward and forward at first, and then downward faster.", "type": "camera", "weight": 3 }, { "content": "A passageway with rocks and dead leaves.", "type": "scene", "weight": 3 }, { "content": "On both sides is a thick bamboo forest.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pushes forward and focuses on a narrow passageway.", "visual_elements": [ { "content": "The camera moves forward.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on a long, narrow passageway.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 7 }, { "index": "TUNA_0804", "video_path": "VIDGEN-1M/_HcvlUee544-Scene-0047.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 8.58, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a dark-colored body of water with overgrown grass, a patch of greenery in the distance, and dim lighting. There is a small boat moored in the water and the man in the boat is holding a fishing rod.\nThe camera first moves to the left as the man adjusts his line. The camera moves to the right with the movement of the hand, and the man in the boat adjusts the line in a different way.", "events": [ { "event": "The video begins with a dark-colored body of water with overgrown grass, a patch of greenery in the distance, and dim lighting. There is a small boat moored in the water and the man in the boat is holding a fishing rod.", "visual_elements": [ { "content": "The scene is a dark-colored body of water with weeds.", "type": "scene", "weight": 3 }, { "content": "In the distance, there's a patch of greenery.", "type": "scene", "weight": 2 }, { "content": "A small boat is moored on the water.", "type": "scene", "weight": 2 }, { "content": "The man in the boat is holding a fishing rod.", "type": "scene", "weight": 3 } ] }, { "event": "The camera first moves to the left as the man adjusts his line. 
The camera moves to the right with the movement of the hand, and the man in the boat adjusts the line in a different way.", "visual_elements": [ { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The man is adjusting his line.", "type": "action", "weight": 3 }, { "content": "The camera moves to the right with the movement of the hand.", "type": "camera", "weight": 3 }, { "content": "The man in the boat adjusts his line in a different way.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0805", "video_path": "VIDGEN-1M/_Wcmw9_IMSU-Scene-0049.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 4.84, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a young Asian man on a soccer field. The man is wearing a white short-sleeved sweatshirt with red stripes, black shorts with yellow stripes, and a pair of black and white sneakers. The man is standing on the grass of a large soccer field, doing a juggling exercise. In the background is an empty red-seated stand. In front of the stand are neatly arranged green bushes. White lines are drawn on the neatly arranged grass.\nThe camera moves to the upper left along the trajectory of the ball. The man lifts the ball with his right foot and quickly catches it with his left foot. Then the man lifts the ball with his knee. The ball flips in the air and lands on the man's head. The man's movements are agile and the ball is always under his control. In the distance, there is a person in orange clothes walking on the left side of the screen.", "events": [ { "event": "The video begins with a young Asian man on a soccer field. The man is wearing a white short-sleeved sweatshirt with red stripes, black shorts with yellow stripes, and a pair of black and white sneakers. The man is standing on the grass of a large soccer field, doing a juggling exercise. In the background is an empty red-seated stand. In front of the stand are neatly arranged green bushes. White lines are drawn on the neatly arranged grass.", "visual_elements": [ { "content": "The camera is focused on a young Asian man on a soccer field.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a white short-sleeved sweatshirt with red stripes, black shorts with yellow stripes, and a pair of black and white sneakers.", "type": "attribute", "weight": 3 }, { "content": "The man is standing on the lawn of a large soccer field, practicing juggling.", "type": "action", "weight": 3 }, { "content": "The background is an empty red-seated stand.", "type": "scene", "weight": 3 }, { "content": "There are neat green bushes in front of the stand.", "type": "scene", "weight": 1 }, { "content": "White lines are drawn on the neat lawn.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves to the upper left along the trajectory of the ball. The man lifts the ball with his right foot and quickly catches it with his left foot. Then the man lifts the ball with his knee. The ball flips in the air and lands on the man's head. The man's movements are agile and the ball is always under his control. 
In the distance, there is a person in orange clothes walking on the left side of the screen.", "visual_elements": [ { "content": "The camera moves to the upper left along the trajectory of the ball.", "type": "camera", "weight": 3 }, { "content": "The man lifts the ball with his right foot and quickly catches it with his left foot.", "type": "action", "weight": 3 }, { "content": "The man then lifts the ball with his knee.", "type": "action", "weight": 3 }, { "content": "The ball flips in the air and lands on the man's head.", "type": "action", "weight": 3 }, { "content": "The man is agile and the ball is always under his control.", "type": "attribute", "weight": 1 }, { "content": "In the distance, a person in orange clothes is walking on the left side of the screen.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0806", "video_path": "VIDGEN-1M/aQj1l4ypVyk-Scene-0049.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 6.97, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is fixed on a kart, with the view facing forward. The driver in the picture is holding the steering wheel, wearing light-colored pants and gray sneakers with brown shoelaces. The kart is on the left side of the picture, driving towards another black and red kart on the right side of the picture. The driver of the kart on the right side of the picture is wearing a yellow helmet, a black short-sleeved shirt and dark blue pants. The track is located in a spacious indoor venue with bright lights on the white ceiling. On the right side of the picture, there are murals and billboards of racing cars on the wall of the track.\nThen, the kart in front turns to the left of the screen, and the kart with the camera directly hits the kart in front, then turns left and enters another empty track. In the distance, other karts are also driving on other tracks on the left side of the screen. There are rubber guardrails on both sides of the track, and the guardrails in some areas are red and white. There are many white columns in the venue. There are a few small white windows on the red wall in the background.", "events": [ { "event": "At the beginning of the video, the camera is fixed on a kart, with the view facing forward. The driver in the picture is holding the steering wheel, wearing light-colored pants and gray sneakers with brown shoelaces. The kart is on the left side of the picture, driving towards another black and red kart on the right side of the picture. The driver of the kart on the right side of the picture is wearing a yellow helmet, a black short-sleeved shirt and dark blue pants. The track is located in a spacious indoor venue with bright lights on the white ceiling. 
On the right side of the picture, there are murals and billboards of racing cars on the wall of the track.", "visual_elements": [ { "content": "The camera is fixed on a kart, with the view facing forward.", "type": "camera", "weight": 3 }, { "content": "The driver in the picture is holding the steering wheel, wearing light-colored pants and gray sneakers with brown shoelaces.", "type": "attribute", "weight": 3 }, { "content": "The kart is on the left side of the picture, driving towards another black and red kart on the right side of the picture.", "type": "action", "weight": 3 }, { "content": "The driver of the kart on the right side of the picture is wearing a yellow helmet, a black short-sleeved shirt and dark blue pants.", "type": "attribute", "weight": 2 }, { "content": "The track is located in a spacious indoor venue.", "type": "scene", "weight": 3 }, { "content": "There are bright lights on the white ceiling.", "type": "scene", "weight": 2 }, { "content": "On the right side of the picture, there are murals and billboards of racing cars on the walls of the track.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the kart in front turns to the left of the screen, and the kart with the camera directly hits the kart in front, then turns left and enters another empty track. In the distance, other karts are also driving on other tracks on the left side of the screen. There are rubber guardrails on both sides of the track, and the guardrails in some areas are red and white. There are many white columns in the venue. There are a few small white windows on the red wall in the background.", "visual_elements": [ { "content": "Then, the kart in front turns to the left of the picture.", "type": "action", "weight": 2 }, { "content": "The kart with the camera directly hits the kart in front, then turns left and enters another empty track.", "type": "action", "weight": 3 }, { "content": "In the distance, other karts are also driving on other tracks on the left side of the picture.", "type": "scene", "weight": 1 }, { "content": "There are rubber guardrails on both sides of the track.", "type": "scene", "weight": 2 }, { "content": "The guardrails in some areas are red and white.", "type": "attribute", "weight": 1 }, { "content": "There are many white columns in the venue.", "type": "scene", "weight": 2 }, { "content": "There are a few small white windows on the red wall in the background.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0807", "video_path": "VIDGEN-1M/ajUiVTwAF6U-Scene-0002.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 4.53, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a beach volleyball match. There are two young white female players on each side of the match. The player wearing a green top and black shorts is facing the camera on one side of the net. The player wearing a burgundy top and black shorts is facing away from the camera on the other side of the net. The players' tops are printed with white numbers. The players from both sides stand in a staggered manner. There are several white tents with red advertising logos printed on them in the distance on the beach. The table under the tent is covered with a black tablecloth with white letters. There is a row of people sitting behind the table, and there are spectators standing behind them. 
There are some tall buildings and palm trees behind the tent.\nThe camera pans to the upper left. A player from the green team on the left side of the screen bends down to catch the ball, and then another player from the green team on the right side of the screen catches the ball again. Then, the player from the green team on the left side of the screen jumps up to spike the ball. Then, the player from the burgundy team on the left side of the screen quickly jumps up and successfully hits the ball upwards again. The player from the burgundy team on the right side of the screen closely follows the movement of the ball, ready to catch it at any time. The player from the burgundy team wears black sunglasses.", "events": [ { "event": "The video begins with the camera focusing on a beach volleyball match. There are two young white female players on each side of the match. The player wearing a green top and black shorts is facing the camera on one side of the net. The player wearing a burgundy top and black shorts is facing away from the camera on the other side of the net. The players' tops are printed with white numbers. The players from both sides stand in a staggered manner. There are several white tents with red advertising logos printed on them in the distance on the beach. The table under the tent is covered with a black tablecloth with white letters. There is a row of people sitting behind the table, and there are spectators standing behind them. There are some tall buildings and palm trees behind the tent.", "visual_elements": [ { "content": "The camera is focused on a beach volleyball match.", "type": "camera", "weight": 3 }, { "content": "There are two young white female athletes on each side of the match.", "type": "attribute", "weight": 3 }, { "content": "The one wearing a green top and black shorts is facing the camera on one side of the net.", "type": "attribute", "weight": 3 }, { "content": "The one wearing a burgundy top and black shorts is facing away from the camera on the other side of the net.", "type": "attribute", "weight": 3 }, { "content": "The athletes' tops are printed with white numbers.", "type": "attribute", "weight": 1 }, { "content": "The athletes on both sides stand in a staggered manner.", "type": "action", "weight": 2 }, { "content": "There are several white tents with red advertising logos printed on the far side of the beach.", "type": "scene", "weight": 2 }, { "content": "The table under the tent is covered with a black tablecloth with white letters.", "type": "scene", "weight": 2 }, { "content": "There is a row of people sitting behind the table, and there are spectators standing behind it.", "type": "scene", "weight": 1 }, { "content": "There are some high-rise buildings and palm trees behind the tent.", "type": "scene", "weight": 1 } ] }, { "event": "The camera pans to the upper left. A player from the green team on the left side of the screen bends down to catch the ball, and then another player from the green team on the right side of the screen catches the ball again. Then, the player from the green team on the left side of the screen jumps up to spike the ball. Then, the player from the burgundy team on the left side of the screen quickly jumps up and successfully hits the ball upwards again. The player from the burgundy team on the right side of the screen closely follows the movement of the ball, ready to catch it at any time. 
The player from the burgundy team wears black sunglasses.", "visual_elements": [ { "content": "The camera moves to the upper left in a swaying manner.", "type": "camera", "weight": 3 }, { "content": "A player from the green team on the left side of the screen bends down to catch the ball.", "type": "action", "weight": 3 }, { "content": "Then another player from the green team on the right side of the screen catches the ball again.", "type": "action", "weight": 3 }, { "content": "Then, the player from the green team on the left side of the screen jumps up to spike the ball.", "type": "action", "weight": 3 }, { "content": "The player from the burgundy team on the left side of the screen quickly jumps up and successfully hits the ball upward again.", "type": "action", "weight": 3 }, { "content": "The player from the burgundy team on the right side of the screen pays close attention to the movement of the ball and is ready to catch the ball at any time.", "type": "action", "weight": 1 }, { "content": "The player from the burgundy team wears black sunglasses.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0808", "video_path": "VIDGEN-1M/bJkiU-ao5Zs-Scene-0051.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 13.23, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on an indoor skatepark. A young white male wearing a white shirt with fuchsia sleeves, black pants, and a black hat stands on a skateboard. The man bends down and prepares to start skating. There are multiple skateboard ramps and platforms on the smooth ground of the venue, and the background wall is white.\nThe camera follows the man's movements to the left. The man starts to slide and tries to do a skateboard trick. The man jumps up and the skateboard spins in the air. Then, the man successfully jumps onto the skateboard with his back to the camera. The man leans forward and stretches his hands to the ground to keep his balance. After standing firmly, the man continues to slide to the left side of the screen. A small door on the white wall on the left side of the screen allows natural daylight to shine through. There are yellow cardboard boxes of different heights in the lower left of the screen. There is a wooden table in front of the wall to the right of the man. There are some sundries on the table.\nThen, the camera moves to the right, and a young white male wearing a red T-shirt, khaki pants, and black sneakers with white stripes appears in the middle of the frame. The man in red stands on a skateboard, ready to start skating.\nThe camera moves left again, following the movements of the man in red. The man in red starts to skate and tries to do a skateboard trick. The man in red jumps up and the skateboard spins in the air. The man in red successfully jumps onto the skateboard with his back to the camera. The man in red then stands up on the skateboard and skates to the left of the frame, facing the camera.\nThe camera moves to the right and cuts back to the first young man in purple. The man in purple tries the same move again. After the man in purple jumps up, his skateboard spins in the air. But the skateboard flips over, and the man in purple falls, supporting himself on the ground with his hands and slowly standing up.\nAt the end of the video, the camera switches to the side with the door. 
On the left side of the screen, the man in purple is smiling at the camera with something in his hand. On the right side of the screen, the man in red is skating on a skateboard.", "events": [ { "event": "The video begins with the camera focused on an indoor skatepark. A young white male wearing a white shirt with fuchsia sleeves, black pants, and a black hat stands on a skateboard. The man bends down and prepares to start skating. There are multiple skateboard ramps and platforms on the smooth ground of the venue, and the background wall is white.", "visual_elements": [ { "content": "The camera is focused on an indoor skatepark.", "type": "camera", "weight": 3 }, { "content": "A young Caucasian male wearing a white shirt with fuchsia sleeves, black pants, and a black hat stands on a skateboard.", "type": "attribute", "weight": 3 }, { "content": "The man bends down and prepares to start skating.", "type": "action", "weight": 3 }, { "content": "There are multiple skateboard ramps and platforms on the smooth ground inside the venue.", "type": "scene", "weight": 2 }, { "content": "The background wall is white.", "type": "scene", "weight": 2 } ] }, { "event": "The camera follows the man's movements to the left. The man starts to slide and tries to do a skateboard trick. The man jumps up and the skateboard spins in the air. Then, the man successfully jumps onto the skateboard with his back to the camera. The man leans forward and stretches his hands to the ground to keep his balance. After standing firmly, the man continues to slide to the left side of the screen. A small door on the white wall on the left side of the screen allows natural daylight to shine through. There are yellow cardboard boxes of different heights in the lower left of the screen. There is a wooden table in front of the wall to the right of the man. There are some sundries on the table.", "visual_elements": [ { "content": "The camera moves to the left following the man's movements.", "type": "camera", "weight": 3 }, { "content": "The man starts to slide and tries to do a skateboard trick.", "type": "action", "weight": 3 }, { "content": "The man jumps up and the skateboard spins in the air.", "type": "action", "weight": 2 }, { "content": "Then, the man successfully jumps onto the skateboard with his back to the camera.", "type": "action", "weight": 2 }, { "content": "The man leans forward and stretches his hands to the ground to keep his balance.", "type": "action", "weight": 1 }, { "content": "After standing firm, the man continues to slide to the left side of the screen.", "type": "action", "weight": 2 }, { "content": "A small door on the white wall on the left side of the screen lets in natural daylight.", "type": "scene", "weight": 1 }, { "content": "There are yellow cardboard boxes of different heights on the lower left side of the screen.", "type": "scene", "weight": 2 }, { "content": "There is a wooden table in front of the wall to the right of the man.", "type": "scene", "weight": 1 }, { "content": "There are some sundries on the table.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves to the right, and a young white male wearing a red T-shirt, khaki pants, and black sneakers with white stripes appears in the middle of the frame. 
The man in red stands on a skateboard, ready to start skating.", "visual_elements": [ { "content": "Then, the camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "A young white man wearing a red T-shirt, khaki pants and black sneakers with white stripes appears in the middle of the frame.", "type": "attribute", "weight": 3 }, { "content": "The man in red stands on a skateboard, ready to start skating.", "type": "action", "weight": 3 } ] }, { "event": "The camera moves left again, following the movements of the man in red. The man in red starts to skate and tries to do a skateboard trick. The man in red jumps up and the skateboard spins in the air. The man in red successfully jumps onto the skateboard with his back to the camera. The man in red then stands up on the skateboard and skates to the left of the frame, facing the camera.", "visual_elements": [ { "content": "The camera follows the man in red's movements and moves to the left again.", "type": "camera", "weight": 3 }, { "content": "The man in red starts to skate and tries to do a skateboard trick.", "type": "action", "weight": 3 }, { "content": "The man in red jumps up and the skateboard spins in the air.", "type": "action", "weight": 2 }, { "content": "The man in red successfully jumps onto the skateboard with his back to the camera.", "type": "action", "weight": 2 }, { "content": "Then the man in red stands up on the skateboard and skates to the left of the screen facing the camera.", "type": "action", "weight": 1 } ] }, { "event": "The camera moves to the right and cuts back to the first young man in purple. The man in purple tries the same move again. After the man in purple jumps up, his skateboard spins in the air. But the skateboard flips over, and the man in purple falls, supporting himself on the ground with his hands and slowly standing up.", "visual_elements": [ { "content": "The camera moves to the right and cuts back to the first young man in purple.", "type": "camera", "weight": 3 }, { "content": "The man in purple tries to do the same action again.", "type": "action", "weight": 3 }, { "content": "After the man in purple jumps up, the skateboard spins in the air.", "type": "action", "weight": 2 }, { "content": "But the skateboard flips over, and the man in purple falls down, supporting himself on the ground with his hands and slowly standing up.", "type": "action", "weight": 2 } ] }, { "event": "At the end of the video, the camera switches to the side with the door. On the left side of the screen, the man in purple is smiling at the camera with something in his hand. 
On the right side of the screen, the man in red is skating on a skateboard.", "visual_elements": [ { "content": "At the end of the video, the camera switches to the side of the venue with a door.", "type": "camera", "weight": 3 }, { "content": "The man in purple on the left side of the screen smiles at the camera, holding something in his hand.", "type": "action", "weight": 1 }, { "content": "The man in red on the right side of the screen slides on the skateboard.", "type": "action", "weight": 1 } ] } ], "n_events": 6, "n_elements": 30 }, { "index": "TUNA_0809", "video_path": "VIDGEN-1M/bUFaufcUk9w-Scene-0134.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 6.04, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on the interior of a convertible, with the front seats, center console, and underside of the car visible in the frame. The front seats are light yellow leather. The center of the center console is reddish-brown wood trimmed with silver buttons and a white steering wheel near the camera. The bottom of the car is reddish-brown wood.\nThe camera moves along the interior of the car to the right, where the rear seats and the right door are also made of light yellow leather with metal handles and some controls.", "events": [ { "event": "The video begins with the camera focusing on the interior of a convertible, with the front seats, center console, and underside of the car visible in the frame. The front seats are light yellow leather. The center of the center console is reddish-brown wood trimmed with silver buttons and a white steering wheel near the camera. The bottom of the car is reddish-brown wood.", "visual_elements": [ { "content": "The camera focuses on the interior of a convertible car.", "type": "camera", "weight": 3 }, { "content": "The front seats, center console and underside of the car can be seen in the frame.", "type": "attribute", "weight": 3 }, { "content": "The front seats are light yellow leather.", "type": "attribute", "weight": 3 }, { "content": "The center of the center console has reddish brown wood trim.", "type": "attribute", "weight": 2 }, { "content": "There are silver buttons on top of the center console.", "type": "attribute", "weight": 2 }, { "content": "There is also a white steering wheel near the camera.", "type": "attribute", "weight": 3 }, { "content": "The bottom of the car is reddish brown wood.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves along the interior of the car to the right, where the rear seats and the right door are also made of light yellow leather with metal handles and some controls.", "visual_elements": [ { "content": "The camera moves to the right along the interior of the car.", "type": "camera", "weight": 3 }, { "content": "The rear seats and the right door are also made of light yellow leather.", "type": "attribute", "weight": 3 }, { "content": "The doors have metal handles and some control buttons.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0810", "video_path": "VIDGEN-1M/c6BNHLelN0Y-Scene-0043.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 3.64, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a young white female figure skater wearing a red dress with fine glitter. 
The woman wears white ice skates and her hair is tied into a neat bun and decorated with red hair accessories. There are billboards with various brands' logos and texts on the fence of the ice rink. The white ice surface is very smooth and reflects the brightness of the lights, making the whole scene very bright.\nThe camera always follows the contestant's movements, capturing the contestant's performance mainly from the side. The contestant starts skating facing the camera, with her arms spread out and her body slightly leaning forward, showing a beautiful posture. Then, the contestant turns around and skates to the right side of the screen, gradually raising her right foot, then her right hand touches her right foot from behind and her left hand opens forward. Then she raises her right foot to her head and her hands touch her right foot from behind. Finally, the contestant puts her right foot down, opens her hands, and makes a beautiful gesture in front of her face.", "events": [ { "event": "At the beginning of the video, the camera focuses on a young white female figure skater wearing a red dress with fine glitter. The woman wears white ice skates and her hair is tied into a neat bun and decorated with red hair accessories. There are billboards with various brands' logos and texts on the fence of the ice rink. The white ice surface is very smooth and reflects the brightness of the lights, making the whole scene very bright.", "visual_elements": [ { "content": "The camera focuses on a young Caucasian female figure skater wearing a red dress with fine glitter.", "type": "camera", "weight": 3 }, { "content": "The woman wears white skates and her hair is tied into a neat bun and decorated with a red hair accessory.", "type": "attribute", "weight": 2 }, { "content": "The fence of the ice rink has billboards with logos and texts of various brands.", "type": "scene", "weight": 1 }, { "content": "The white ice surface is very smooth.", "type": "scene", "weight": 2 }, { "content": "The brightness of the lights is reflected on the ice surface, making the whole scene very bright.", "type": "scene", "weight": 1 } ] }, { "event": "The camera always follows the contestant's movements, capturing the contestant's performance mainly from the side. The contestant starts skating facing the camera, with her arms spread out and her body slightly leaning forward, showing a beautiful posture. Then, the contestant turns around and skates to the right side of the screen, gradually raising her right foot, then her right hand touches her right foot from behind and her left hand opens forward. Then she raises her right foot to her head and her hands touch her right foot from behind. 
Finally, the contestant puts her right foot down, opens her hands, and makes a beautiful gesture in front of her face.", "visual_elements": [ { "content": "The camera always follows the contestant's movements, mainly capturing the contestant's performance from the side.", "type": "camera", "weight": 3 }, { "content": "The female contestant starts skating facing the camera, with her arms spread out and her body slightly leaning forward, showing a beautiful posture.", "type": "action", "weight": 3 }, { "content": "Then, the female contestant turns around and skates to the right side of the screen, gradually raising her right foot.", "type": "action", "weight": 3 }, { "content": "Then the female contestant reaches back to touch her right foot with her right hand and opens her left hand forward.", "type": "action", "weight": 2 }, { "content": "The female contestant then raises her right foot to her head and reaches back to touch her right foot with both hands.", "type": "action", "weight": 2 }, { "content": "Finally, the female contestant puts her right foot down, opens her hands, and makes a beautiful gesture in front of her face.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0811", "video_path": "VIDGEN-1M/cBVsqFUBjpk-Scene-0080.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 7.56, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a partial view of a mechanical device, mainly showing a shock absorber with red springs and a couple of black batteries. The batteries are labeled in white and are connected to each other by red and black wires. A number of tools and equipment can be seen in the background, including a yellow tape measure, black tires underneath the tape measure, and some loose wires.\nAs the camera slowly turns counterclockwise to the right, more mechanics can be seen, including a metal frame and connecting parts.\nAs the camera continues to move counterclockwise, the battery is clearly marked with a brand logo and model information, and some metal bolts and fixtures can be seen.", "events": [ { "event": "The video begins with the camera focusing on a partial view of a mechanical device, mainly showing a shock absorber with red springs and a couple of black batteries. The batteries are labeled in white and are connected to each other by red and black wires. 
A number of tools and equipment can be seen in the background, including a yellow tape measure, black tires underneath the tape measure, and some loose wires.", "visual_elements": [ { "content": "The camera focuses on a part of the mechanism.", "type": "camera", "weight": 3 }, { "content": "It mainly shows a shock absorber with a red spring and a couple of black batteries.", "type": "attribute", "weight": 3 }, { "content": "The batteries are labeled in white.", "type": "attribute", "weight": 2 }, { "content": "The batteries are connected by red and black wires.", "type": "attribute", "weight": 2 }, { "content": "Some tools and equipment can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "The equipment consists of a yellow tape measure, black tires under the tape measure, and some loose wires.", "type": "scene", "weight": 2 } ] }, { "event": "As the camera slowly turns counterclockwise to the right, more mechanics can be seen, including a metal frame and connecting parts.", "visual_elements": [ { "content": "The camera slowly turns counterclockwise to the right.", "type": "camera", "weight": 3 }, { "content": "More mechanical structures can be seen.", "type": "attribute", "weight": 1 }, { "content": "The structure includes a metal frame and connecting parts.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera continues to move counterclockwise, the battery is clearly marked with a brand logo and model information, and some metal bolts and fixtures can be seen.", "visual_elements": [ { "content": "The camera continues to move counterclockwise.", "type": "camera", "weight": 3 }, { "content": "The battery is clearly marked with a brand logo and model number information.", "type": "attribute", "weight": 2 }, { "content": "Some metal bolts and fixtures can also be seen.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0812", "video_path": "VIDGEN-1M/cG3KE6qRBZU-Scene-0045.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 12.64, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera is focused on a spacious outdoor equestrian arena. A young woman wearing a black short-sleeved top and black riding pants is riding on a brown horse. The woman's silver hair is tied into a low ponytail and she wears a black helmet. The woman rides to the left of the screen. There is a wooden fence around the arena. There are some green trees and yellow weeds in the background. There are some clouds in the sky and the weather is relatively clear.\nThe camera pans left to follow the horse's movement. The horse turns and runs forward to face the camera. The rider holds the reins tightly, and the rider's body rises and falls with the rhythm of the horse. A white fence with red wooden boards appears on the left side of the frame.\nIn the second half of the video, the rider begins to guide the horse to do jumping training. There are some green and white obstacles in the field, and the rider makes the horse speed up and jump over these obstacles. The horse jumps in the air, jumps over the obstacles, and then lands steadily. The camera moves quickly to the right along with the horse's movements. In the background, a young white woman in khaki clothes stands in the field and watches the horse's movements. In the distance, a house appears deep in the trees.\nFinally, the camera moves to the right and slows down. 
The rider tightens the reins to slow the horse down. The camera finally captures the backs of the rider and horse.", "events": [ { "event": "The camera is focused on a spacious outdoor equestrian arena. A young woman wearing a black short-sleeved top and black riding pants is riding on a brown horse. The woman's silver hair is tied into a low ponytail and she wears a black helmet. The woman rides to the left of the screen. There is a wooden fence around the arena. There are some green trees and yellow weeds in the background. There are some clouds in the sky and the weather is relatively clear.", "visual_elements": [ { "content": "The camera is pointed at a spacious outdoor equestrian arena.", "type": "camera", "weight": 3 }, { "content": "A young woman wearing a black short-sleeved top and black riding pants is riding on a brown horse.", "type": "attribute", "weight": 3 }, { "content": "The woman has silver hair in a low ponytail and wears a black helmet.", "type": "attribute", "weight": 1 }, { "content": "The woman rides to the left of the picture.", "type": "action", "weight": 3 }, { "content": "The arena is surrounded by a wooden fence.", "type": "scene", "weight": 1 }, { "content": "There are some green trees and yellow weeds in the background.", "type": "scene", "weight": 2 }, { "content": "There are some clouds in the sky and the weather is relatively clear.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans left to follow the horse's movement. The horse turns and runs forward to face the camera. The rider holds the reins tightly, and the rider's body rises and falls with the rhythm of the horse. A white fence with red wooden boards appears on the left side of the frame.", "visual_elements": [ { "content": "The camera pans left following the horse's movement.", "type": "camera", "weight": 3 }, { "content": "The horse turns and runs forward facing the camera.", "type": "action", "weight": 3 }, { "content": "The rider holds the reins tightly, and her body rises and falls with the horse's rhythm.", "type": "action", "weight": 3 }, { "content": "A white fence with red wooden boards appears on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "In the second half of the video, the rider begins to guide the horse to do jumping training. There are some green and white obstacles in the field, and the rider makes the horse speed up and jump over these obstacles. The horse jumps in the air, jumps over the obstacles, and then lands steadily. The camera moves quickly to the right along with the horse's movements. In the background, a young white woman in khaki clothes stands in the field and watches the horse's movements. 
In the distance, a house appears deep in the trees.", "visual_elements": [ { "content": "In the second half of the video, the rider begins to guide the horse to do jumping training.", "type": "action", "weight": 3 }, { "content": "There are some green and white obstacles in the field.", "type": "scene", "weight": 2 }, { "content": "The rider makes the horse speed up and jump over these obstacles.", "type": "action", "weight": 3 }, { "content": "The horse jumps in the air, jumps over the obstacles, and then lands steadily.", "type": "action", "weight": 3 }, { "content": "The camera moves quickly to the right along with the horse's movements.", "type": "camera", "weight": 3 }, { "content": "In the background, a young white woman in khaki clothes stands in the field and watches the horse's movements.", "type": "scene", "weight": 1 }, { "content": "A house appears in the distance deep in the trees.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera moves to the right and slows down. The rider tightens the reins to slow the horse down. The camera finally captures the backs of the rider and horse.", "visual_elements": [ { "content": "Finally, the camera moves to the right and slows down.", "type": "camera", "weight": 3 }, { "content": "The rider tightens the reins to slow the horse down.", "type": "action", "weight": 3 }, { "content": "The camera finally captures the backs of the rider and the horse.", "type": "camera", "weight": 1 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0813", "video_path": "VIDGEN-1M/cShPs_rvbHg-Scene-0082.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 8.84, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on two animal toys, a long-eared dog on the left and a small bee on the right. The dog has a light brown body, dark gray ears, large and shiny eyes with green eyeshadow, and a white spot on the face. The small bee is yellow with green wings and large and bright eyes. At the top of the screen, a hand with purple nail polish with blue spots on the nails is holding the dog's head. The background of the picture is light blue, and the bottom is a white plane.\nThe camera moves slightly to the left. The dog's head is gently pressed by the finger, causing it to move slightly to the left of the frame. The bee toy partially disappears on the right side of the frame. A pink and white object partially enters the frame from the front.\nFinally, the camera moves gently to the right, and the hand pinches the dog's ears a few times, and then presses the dog's head.", "events": [ { "event": "At the beginning of the video, the camera is focused on two animal toys, a long-eared dog on the left and a small bee on the right. The dog has a light brown body, dark gray ears, large and shiny eyes with green eyeshadow, and a white spot on the face. The small bee is yellow with green wings and large and bright eyes. At the top of the screen, a hand with purple nail polish with blue spots on the nails is holding the dog's head. 
The background of the picture is light blue, and the bottom is a white plane.", "visual_elements": [ { "content": "The camera is focused on two animal toys.", "type": "camera", "weight": 3 }, { "content": "On the left side of the screen is a long-eared dog and on the right side is a small bee.", "type": "attribute", "weight": 3 }, { "content": "The dog has a light brown body and dark grey ears.", "type": "attribute", "weight": 3 }, { "content": "The dog's eyes are large and shiny with green eyeshadow.", "type": "attribute", "weight": 2 }, { "content": "There is a white spot on the dog's face.", "type": "attribute", "weight": 1 }, { "content": "The small bee is yellow.", "type": "attribute", "weight": 3 }, { "content": "The bee has green wings and large, bright eyes.", "type": "attribute", "weight": 1 }, { "content": "A hand is pressing the dog's head at the top of the screen.", "type": "action", "weight": 3 }, { "content": "The nails of the hand are painted with purple nail polish with blue spots.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves slightly to the left. The dog's head is gently pressed by the finger, causing it to move slightly to the left of the frame. The bee toy partially disappears on the right side of the frame. A pink and white object partially enters the frame from the front.", "visual_elements": [ { "content": "The camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The fingers gently press the dog's head, causing it to move slightly to the left side of the screen.", "type": "action", "weight": 2 }, { "content": "The bee toy part disappears on the right side of the screen.", "type": "attribute", "weight": 2 }, { "content": "A pink and white object partially enters the screen from the front.", "type": "attribute", "weight": 1 } ] }, { "event": "Finally, the camera moves gently to the right, and the hand pinches the dog's ears a few times, and then presses the dog's head.", "visual_elements": [ { "content": "Finally, the camera moves slightly to the right.", "type": "camera", "weight": 2 }, { "content": "The hand pinches the dog's ears a few times before pressing the dog's head.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0814", "video_path": "VIDGEN-1M/dCiXgpLJm4E-Scene-0042.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 8.58, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on two men in the water, which is about chest deep. The man on the left is wearing a black diving suit with a name tag on his chest. The man on the right is wearing a short-sleeved grey diving suit and holding a small camera device. Both men have black gloves on their hands. In the background, a red and white diving flag is planted on the riverbank. There is a stone wall with iron railings on the riverbank, and behind the railings are green space and buildings. The building is a red brick structure with large glass windows. There is also a white water tower and some trees in the distance.\nThen, the camera moves slightly to the left. The man on the right side of the screen is holding a camera and filming the person on the left side of the screen. The person on the left side of the screen has his hands hanging down in the water.\nFinally, the camera starts to shake. The man on the right side of the screen is holding a camera and facing the camera. 
A hand with a black glove appears in the lower right corner of the screen. The hand stretches out from the water and swings left and right. There is light reflection on the water surface in the background.", "events": [ { "event": "The video begins with the camera focusing on two men in the water, which is about chest deep. The man on the left is wearing a black diving suit with a name tag on his chest. The man on the right is wearing a short-sleeved grey diving suit and holding a small camera device. Both men have black gloves on their hands. In the background, a red and white diving flag is planted on the riverbank. There is a stone wall with iron railings on the riverbank, and behind the railings are green space and buildings. The building is a red brick structure with large glass windows. There is also a white water tower and some trees in the distance.", "visual_elements": [ { "content": "The camera is focused on two men in the water.", "type": "camera", "weight": 3 }, { "content": "The water is about chest-deep.", "type": "scene", "weight": 3 }, { "content": "The man on the left is wearing a black diving suit.", "type": "attribute", "weight": 2 }, { "content": "The man on the left has a name tag on his chest.", "type": "attribute", "weight": 1 }, { "content": "The man on the right is wearing a gray short-sleeved diving suit.", "type": "attribute", "weight": 2 }, { "content": "The man on the right is holding a small camera.", "type": "attribute", "weight": 2 }, { "content": "Both of them are wearing black gloves.", "type": "attribute", "weight": 2 }, { "content": "In the background, a red and white diving flag is planted on the river bank.", "type": "scene", "weight": 1 }, { "content": "There is a stone wall on the river bank.", "type": "scene", "weight": 2 }, { "content": "There are iron railings on the stone wall.", "type": "scene", "weight": 1 }, { "content": "Behind the railings are green space and a red brick building with large glass windows.", "type": "scene", "weight": 1 }, { "content": "There is also a white water tower and some trees in the distance.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves slightly to the left. The man on the right side of the screen is holding a camera and filming the person on the left side of the screen. The person on the left side of the screen has his hands hanging down in the water.", "visual_elements": [ { "content": "Then, the camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The man on the right of the screen is holding a camera and shooting at the man on the left of the screen.", "type": "action", "weight": 3 }, { "content": "The man on the left of the screen has his hands hanging in the water.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera starts to shake. The man on the right side of the screen is holding a camera and facing the camera. A hand with a black glove appears in the lower right corner of the screen. The hand stretches out from the water and swings left and right. 
There is light reflection on the water surface in the background.", "visual_elements": [ { "content": "Finally, the camera starts to shake.", "type": "camera", "weight": 3 }, { "content": "The man on the right of the screen is holding a camera and facing the camera.", "type": "action", "weight": 3 }, { "content": "A hand with a black glove appears in the lower right of the screen.", "type": "attribute", "weight": 1 }, { "content": "The hand extends out of the water and swings left and right.", "type": "action", "weight": 1 }, { "content": "There is light reflection on the water surface in the background.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0815", "video_path": "VIDGEN-1M/dEXCuIdqNr8-Scene-0083.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 8.28, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a close-up of the steering wheel area of a car, and the camera shakes slightly to show a logo in the center of the steering wheel. The dashboard behind the steering wheel is illuminated and shows various vehicle information, including the speedometer and other indicators, and the overall interior of the car is dark. During this period, the scene shows a person in a dark suit and white shirt moving back and forth near the steering wheel, with a dark watch on the left hand.\nThe camera then moves to the left and several people can be seen standing and talking in the background, dressed in formal business attire.", "events": [ { "event": "The video begins with a close-up of the steering wheel area of a car, and the camera shakes slightly to show a logo in the center of the steering wheel. The dashboard behind the steering wheel is illuminated and shows various vehicle information, including the speedometer and other indicators, and the overall interior of the car is dark. 
During this period, the scene shows a person in a dark suit and white shirt moving back and forth near the steering wheel, with a dark watch on the left hand.", "visual_elements": [ { "content": "The camera closes in on the steering wheel area of a car.", "type": "camera", "weight": 3 }, { "content": "The camera shakes slightly.", "type": "camera", "weight": 3 }, { "content": "There is a logo in the center of the steering wheel.", "type": "attribute", "weight": 1 }, { "content": "The dashboard behind the steering wheel is lit up.", "type": "attribute", "weight": 3 }, { "content": "The dashboard shows various vehicle information.", "type": "attribute", "weight": 2 }, { "content": "The information includes a speedometer and other indicators.", "type": "attribute", "weight": 3 }, { "content": "The overall interior of the car is dark.", "type": "scene", "weight": 2 }, { "content": "The scene shows a person in a dark suit and white shirt moving back and forth near the steering wheel, with a dark watch on the left hand.", "type": "action", "weight": 2 } ] }, { "event": "The camera then moves to the left and several people can be seen standing and talking in the background, dressed in formal business attire.", "visual_elements": [ { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "In the background, several people can be seen standing and talking together.", "type": "scene", "weight": 2 }, { "content": "These people are dressed in formal business attire.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0816", "video_path": "VIDGEN-1M/dL4vRihNk4s-Scene-0031.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 3.87, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an old cinema building located at a corner. The curb on the left side is painted yellow and blue. In the center of the corner stands a red ticket booth, and the rooftop structure of the entrance aligns with the shape of the corner. A red mural at the center of the roof features regularly spaced golden patterns, while the white wall bases on both sides display the opening hours and other information.\nThe camera pans upward, revealing colorful vertical striped structures adorning the rooftop, arranged in an arcuate pattern. In front of this arc is a large white signboard that reads \"RODGERS.\" To the left of the signboard stands a group of sculpted pigeons in flight, against a background of blue sky.", "events": [ { "event": "At the beginning of the video, the camera focuses on an old cinema building located at a corner. The curb on the left side is painted yellow and blue. In the center of the corner stands a red ticket booth, and the rooftop structure of the entrance aligns with the shape of the corner. 
A red mural at the center of the roof features regularly spaced golden patterns, while the white wall bases on both sides display the opening hours and other information.", "visual_elements": [ { "content": "The camera focuses on an old cinema building.", "type": "camera", "weight": 3 }, { "content": "This building is located at a corner.", "type": "attribute", "weight": 2 }, { "content": "The curb on the left side is painted yellow and blue.", "type": "attribute", "weight": 1 }, { "content": "In the center of the corner stands a red ticket booth.", "type": "attribute", "weight": 2 }, { "content": "The rooftop structure of the cinema entrance aligns with the shape of the corner.", "type": "attribute", "weight": 2 }, { "content": "A red mural at the center of the roof features regularly spaced golden patterns.", "type": "attribute", "weight": 2 }, { "content": "The white wall bases on both sides display the opening hours and other content.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans upward, revealing colorful vertical striped structures adorning the rooftop, arranged in an arcuate pattern. In front of this arc is a large white signboard that reads \"RODGERS.\" To the left of the signboard stands a group of sculpted pigeons in flight, against a background of blue sky.", "visual_elements": [ { "content": "The camera pans upward.", "type": "camera", "weight": 3 }, { "content": "Colorful vertical striped structures adorn the rooftop.", "type": "attribute", "weight": 2 }, { "content": "These stripes are arranged in an arcuate pattern.", "type": "attribute", "weight": 2 }, { "content": "In front of this arc is a large white signboard.", "type": "attribute", "weight": 2 }, { "content": "The large signboard reads \"RODGERS.\"", "type": "attribute", "weight": 2 }, { "content": "To the left of the signboard stands a group of sculpted pigeons in flight.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0817", "video_path": "VIDGEN-1M/dQ3xvJaGQB4-Scene-0175.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 12.61, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a bowl of white rice. The grains of rice are crystal clear. There are red sauce, red chilies, green scallions and white vegetables on the rice. A spoon is used to stir the rice, making the rice fully coated with the sauce. Blurry soup bowls can be seen in the background.\nThe spoon scoops up a big mouthful of rice mixed with the sauce and vegetables, and the camera then moves slowly upward. There is still steam rising above the scooped-up rice.", "events": [ { "event": "At the beginning of the video, the camera is focused on a bowl of white rice. The grains of rice are crystal clear. There are red sauce, red chilies, green scallions and white vegetables on the rice. A spoon is used to stir the rice, making the rice fully coated with the sauce. 
Blurry soup bowls can be seen in the background.", "visual_elements": [ { "content": "The camera is focused on a bowl of white rice.", "type": "camera", "weight": 3 }, { "content": "The grains of rice are crystal clear.", "type": "attribute", "weight": 1 }, { "content": "There are red sauce, red chilies, green scallions and white vegetables on the rice.", "type": "attribute", "weight": 2 }, { "content": "A spoon is used to stir the rice.", "type": "action", "weight": 3 }, { "content": "Stirring makes the rice fully coated with the sauce.", "type": "attribute", "weight": 2 }, { "content": "Blurry soup bowls can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "The spoon scoops up a big mouthful of rice mixed with the sauce and vegetables, and the camera then moves slowly upward. There is still steam rising above the scooped-up rice.", "visual_elements": [ { "content": "The spoon scoops up a big mouthful of rice mixed with the sauce and vegetables.", "type": "action", "weight": 3 }, { "content": "The camera then moves slowly upward.", "type": "camera", "weight": 3 }, { "content": "There is still steam rising above the scooped-up rice.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0818", "video_path": "VIDGEN-1M/duphyZ8yCRs-Scene-0041.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 7.3, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a red dirt off-road track. There is a male rider in a white racing suit and a black and white helmet in the picture. The rider is riding a red off-road motorcycle from a distance. There are obvious black and yellow logos on the rider's racing suit, and there are also eye-catching yellow and black logos on the motorcycle. The background is dense green vegetation and some yellow weeds.\nThe camera is shot from the front, following the rider's movements to the upper left. The rider accelerates up a small slope, and the motorcycle flies into the air. The rider performs a difficult sideways move in the air. The motorcycle tilts in the air, and the rider maintains balance. Then, after completing the move in the air, the rider lands steadily. The tires of the motorcycle touch the ground, raising a cloud of red dust.\nThe camera then shoots from behind the rider. The camera continues to follow the rider's movements from a higher angle. The rider again rushes up a higher ramp and the motorcycle becomes airborne again. This time, the rider performs a more complex flip in the air, the motorcycle spins in the air, and the rider maintains perfect control. There are some buildings and telephone poles in the distance. There are two spectators standing next to the track watching the performance of the motorcyclist and filming the video with their mobile phones.", "events": [ { "event": "At the beginning of the video, the camera is focused on a red dirt off-road track. There is a male rider in a white racing suit and a black and white helmet in the picture. The rider is riding a red off-road motorcycle from a distance. There are obvious black and yellow logos on the rider's racing suit, and there are also eye-catching yellow and black logos on the motorcycle. 
The background is dense green vegetation and some yellow weeds.", "visual_elements": [ { "content": "The camera is focused on a red dirt off-road track.", "type": "camera", "weight": 3 }, { "content": "There is a male rider in a white racing suit and a black and white helmet in the picture.", "type": "attribute", "weight": 3 }, { "content": "The rider is riding a red off-road motorcycle and coming quickly from a distance.", "type": "action", "weight": 3 }, { "content": "The rider's racing suit has obvious black and yellow logos, and the motorcycle also has eye-catching yellow and black logos.", "type": "attribute", "weight": 2 }, { "content": "The background is dense green vegetation and some yellow weeds.", "type": "scene", "weight": 3 } ] }, { "event": "The camera is shot from the front, following the rider's movements to the upper left. The rider accelerates up a small slope, and the motorcycle flies into the air. The rider performs a difficult sideways move in the air. The motorcycle tilts in the air, and the rider maintains balance. Then, after completing the move in the air, the rider lands steadily. The tires of the motorcycle touch the ground, raising a cloud of red dust.", "visual_elements": [ { "content": "The camera is shot from the front, following the rider's movements to the upper left.", "type": "camera", "weight": 3 }, { "content": "The rider accelerates up a small slope, and the motorcycle flies into the air.", "type": "action", "weight": 3 }, { "content": "The rider performs a difficult sideways movement in the air.", "type": "action", "weight": 3 }, { "content": "The motorcycle tilts in the air, and the rider maintains balance.", "type": "action", "weight": 2 }, { "content": "Then, after completing the movement in the air, the rider lands steadily.", "type": "action", "weight": 1 }, { "content": "The motorcycle's tire touches the ground, raising a cloud of red dust.", "type": "action", "weight": 1 } ] }, { "event": "The camera then shoots from behind the rider. The camera continues to follow the rider's movements from a higher angle. The rider again rushes up a higher ramp and the motorcycle becomes airborne again. This time, the rider performs a more complex flip in the air, the motorcycle spins in the air, and the rider maintains perfect control. There are some buildings and telephone poles in the distance. 
There are two spectators standing next to the track watching the performance of the motorcyclist and filming the video with their mobile phones.", "visual_elements": [ { "content": "The camera then shoots from behind the rider.", "type": "camera", "weight": 3 }, { "content": "The camera continues to follow the rider's movements from a higher angle.", "type": "camera", "weight": 3 }, { "content": "The rider rushes up a higher ramp again and the motorcycle takes off again.", "type": "action", "weight": 3 }, { "content": "This time, the rider performs a more complex flip in the air.", "type": "action", "weight": 3 }, { "content": "The motorcycle spins in the air and the rider maintains perfect control.", "type": "action", "weight": 2 }, { "content": "There are two spectators standing next to the track watching the performance of the motorcyclist and filming the video with their mobile phones.", "type": "action", "weight": 1 }, { "content": "There are some buildings and telephone poles in the distance.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0819", "video_path": "VIDGEN-1M/dy9bmQORumk-Scene-0035.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 9.76, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a hand. This hand is wearing a blue wristband, and its fingers are pointing at a stack of yogurt cups. There are white words \"Vanilla Light\" marked on the cups. The packaging color of the yogurt cups is mainly a combination of blue and purple.\nThe fingers gently tapped on the top yogurt cup. Then, the camera zooms in and moves slightly to the right. Meanwhile, this hand picks up the top yogurt cup to show it, and then puts the picked-up yogurt cup back to its original place and taps on it again. Then, the camera zooms out slightly.\nIn the background, there is a red mesh bag full of oranges on the left side of the yogurt cups. There is a red label on the mesh bag, reading \"Mix & Match 2 for €5\". Behind the orange mesh bag is a bread bag labeled \"Brennans Wholemeal Be Good\". On the right side of the yogurt cups is a large package of toilet paper. There are blue and white patterns on the packaging and it is labeled \"Soft Toilet Tissue\".", "events": [ { "event": "At the beginning of the video, the camera is focused on a hand. This hand is wearing a blue wristband, and its fingers are pointing at a stack of yogurt cups. There are white words \"Vanilla Light\" marked on the cups. The packaging color of the yogurt cups is mainly a combination of blue and purple.", "visual_elements": [ { "content": "The camera is focused on a hand.", "type": "camera", "weight": 3 }, { "content": "This hand is wearing a blue wristband.", "type": "attribute", "weight": 1 }, { "content": "The fingers are pointing at a stack of yogurt cups.", "type": "attribute", "weight": 3 }, { "content": "There are white words \"Vanilla Light\" marked on the cups.", "type": "attribute", "weight": 1 }, { "content": "The packaging color of the yogurt cups is mainly a combination of blue and purple.", "type": "attribute", "weight": 2 } ] }, { "event": "The fingers gently tapped on the top yogurt cup. Then, the camera zooms in and moves slightly to the right. Meanwhile, this hand picks up the top yogurt cup to show it, and then puts the picked-up yogurt cup back to its original place and taps on it again. 
Then, the camera zooms out slightly.", "visual_elements": [ { "content": "The fingers gently tapped on the top yogurt cup.", "type": "action", "weight": 2 }, { "content": "The camera zooms in and moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "This hand picks up the top yogurt cup to show it.", "type": "action", "weight": 3 }, { "content": "This hand puts the picked-up yogurt cup back to its original place and taps on it again.", "type": "action", "weight": 3 }, { "content": "The camera zooms out slightly.", "type": "camera", "weight": 3 } ] }, { "event": "In the background, there is a red mesh bag full of oranges on the left side of the yogurt cups. There is a red label on the mesh bag, reading \"Mix & Match 2 for €5\". Behind the orange mesh bag is a bread bag labeled \"Brennans Wholemeal Be Good\". On the right side of the yogurt cups is a large package of toilet paper. There are blue and white patterns on the packaging and it is labeled \"Soft Toilet Tissue\".", "visual_elements": [ { "content": "In the background, there is a red mesh bag full of oranges on the left side of the yogurt cups.", "type": "scene", "weight": 2 }, { "content": "There is a red label on the mesh bag.", "type": "attribute", "weight": 1 }, { "content": "The label reads \"Mix & Match 2 for €5\".", "type": "attribute", "weight": 1 }, { "content": "Behind the orange mesh bag is a bread bag labeled \"Brennans Wholemeal Be Good\".", "type": "attribute", "weight": 1 }, { "content": "On the right side of the yogurt cups is a large package of toilet paper.", "type": "attribute", "weight": 2 }, { "content": "There are blue and white patterns on the packaging and it is labeled \"Soft Toilet Tissue\".", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0820", "video_path": "VIDGEN-1M/eEOoeqBh-B8-Scene-0058.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 7.44, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a young white man with a brown beard, a green hat and a light purple T-shirt. In the background is a building with gray brick and stone walls, and blue door and window frames. There is a green door on the left side of the picture with a \"Closed\" sign on it.\nThe camera keeps shaking left and right. The man adjusts his hat with his right hand while speaking to the camera.\nThe camera moves slightly to the right. Another person in blue appears on the right side of the frame. He continues to speak to the camera, occasionally turning his head to look to the right. The door on the left side of the frame gradually disappears to the left side of the camera, and the red sign on the right side of the frame fully appears, reading \"TOG24 Discover Life\".", "events": [ { "event": "At the beginning of the video, the camera is focused on a young white man with a brown beard, a green hat and a light purple T-shirt. In the background is a building with gray brick and stone walls, and blue door and window frames. 
There is a green door on the left side of the picture with a \"Closed\" sign on it.", "visual_elements": [ { "content": "The camera is focused on a young white male.", "type": "camera", "weight": 3 }, { "content": "The male has a brown beard, a green hat, and a light purple T-shirt.", "type": "attribute", "weight": 3 }, { "content": "The background is a building with gray brick and stone walls.", "type": "scene", "weight": 3 }, { "content": "The building has blue door and window frames.", "type": "scene", "weight": 2 }, { "content": "There is a green door on the left side of the screen with a \"Closed\" sign on it.", "type": "scene", "weight": 1 } ] }, { "event": "The camera keeps shaking left and right. The man adjusts his hat with his right hand while speaking to the camera.", "visual_elements": [ { "content": "The camera keeps shaking from side to side.", "type": "camera", "weight": 3 }, { "content": "The male is talking to the camera and adjusting his hat with his right hand.", "type": "action", "weight": 3 } ] }, { "event": "The camera moves slightly to the right. Another person in blue appears on the right side of the frame. He continues to speak to the camera, occasionally turning his head to look to the right. The door on the left side of the frame gradually disappears to the left side of the camera, and the red sign on the right side of the frame fully appears, reading \"TOG24 Discover Life\".", "visual_elements": [ { "content": "The camera moves slightly to the right.", "type": "camera", "weight": 2 }, { "content": "Another person wearing blue clothes appears on the right side of the screen.", "type": "attribute", "weight": 1 }, { "content": "The male continues to talk to the camera, occasionally turning his head to look to the right side of the screen.", "type": "action", "weight": 2 }, { "content": "The door on the left side of the screen gradually disappears to the left side of the camera.", "type": "scene", "weight": 1 }, { "content": "The red sign on the right side of the screen is fully revealed.", "type": "scene", "weight": 2 }, { "content": "The sign reads \"TOG24 Discover Life\"", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0821", "video_path": "VIDGEN-1M/eIjf62AaUD0-Scene-0102.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 8.41, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera looks down at the entire football field. In the picture, a player in a green jersey can be seen sprinting forward with the ball, and in front of him is the opponent's penalty area. Two players in white jerseys are flanking him on both sides to try to stop his attack, and other players in white jerseys are running in the distance behind him. The camera zooms in and moves to the right with the player. As the green jersey player sprinting with the ball gets closer and closer to the goal, the goalkeeper in the black jersey falls down, causing the green jersey player and one of the white jersey players to trip to the ground, while the other white jersey player is intact. The grass in the background is neatly divided into light and dark stripes, and the goal is on the right side of the picture. A billboard can also be seen on the right side of the picture.\nThe camera switches to a closer angle, shooting from behind the players. Player No. 11 in the green jersey is running towards the goal. On his left is player No. 
18 in the white jersey, and on his right is player No. 29 in the white jersey. While running, player No. 18 on the left hits the player in the green jersey with his body, and both of them and the goalkeeper in the black jersey fall to the ground.", "events": [ { "event": "At the beginning of the video, the camera looks down at the entire football field. In the picture, a player in a green jersey can be seen sprinting forward with the ball, and in front of him is the opponent's penalty area. Two players in white jerseys are flanking him on both sides to try to stop his attack, and other players in white jerseys are running in the distance behind him. The camera zooms in and moves to the right with the player. As the green jersey player sprinting with the ball gets closer and closer to the goal, the goalkeeper in the black jersey falls down, causing the green jersey player and one of the white jersey players to trip to the ground, while the other white jersey player is intact. The grass in the background is neatly divided into light and dark stripes, and the goal is on the right side of the picture. A billboard can also be seen on the right side of the picture.", "visual_elements": [ { "content": "The camera looks down at the entire football field.", "type": "camera", "weight": 3 }, { "content": "In the picture, a player in a green jersey is seen sprinting forward with the ball.", "type": "action", "weight": 3 }, { "content": "In front of the green jersey player is the opponent's penalty area.", "type": "attribute", "weight": 2 }, { "content": "Two players in white jerseys are flanking the green jersey player to try to stop his attack.", "type": "action", "weight": 3 }, { "content": "Behind the green jersey player are other players in white jerseys running in the distance.", "type": "action", "weight": 1 }, { "content": "The camera zooms in and follows the player to the right.", "type": "camera", "weight": 3 }, { "content": "The green jersey player sprinting with the ball is getting closer and closer to the goal.", "type": "action", "weight": 2 }, { "content": "The goalkeeper in the black jersey falls down, causing the green jersey player and one of the white jersey players to trip and fall to the ground.", "type": "action", "weight": 3 }, { "content": "The other white jersey player is intact.", "type": "attribute", "weight": 2 }, { "content": "The pitch in the background is neatly divided into light and dark stripes.", "type": "scene", "weight": 2 }, { "content": "The goal is on the right side of the screen.", "type": "attribute", "weight": 2 }, { "content": "A billboard can also be seen on the right side of the screen.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera switches to a closer angle, shooting from behind the players. Player No. 11 in the green jersey is running towards the goal. On his left is player No. 18 in the white jersey, and on his right is player No. 29 in the white jersey. While running, player No. 
18 on the left hits the player in the green jersey with his body, and both of them and the goalkeeper in the black jersey fall to the ground.", "visual_elements": [ { "content": "The camera switches to a closer angle.", "type": "camera", "weight": 3 }, { "content": "The shot is taken from behind the players.", "type": "camera", "weight": 3 }, { "content": "The number 11 player in the green jersey runs towards the goal.", "type": "action", "weight": 3 }, { "content": "To the left of the green jersey player is the number 18 player in the white jersey.", "type": "attribute", "weight": 2 }, { "content": "To the right is the number 29 player in the white jersey.", "type": "attribute", "weight": 2 }, { "content": "While running, the number 18 player on the left hits the green jersey player with his body.", "type": "action", "weight": 3 }, { "content": "Both players and the goalkeeper in the black jersey fall to the ground.", "type": "action", "weight": 3 } ] } ], "n_events": 2, "n_elements": 19 }, { "index": "TUNA_0822", "video_path": "VIDGEN-1M/eNx019U6lY0-Scene-0060.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 6.38, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on an indoor gymnastics training area. A middle-aged Asian man in a dark blue sportswear with white stripes and logos stands in the center of the frame. The man has his hands on his hips and is looking to the right of the frame. On the right side of the frame, a pair of slender legs are on a yellow gymnastics mat in the foreground. In the background, a blue locker is filled with items of various colors. The walls of the room are white. There is some light coming in from the window on the left side of the frame. There are also some framed decorations hanging on the wall on the right side of the frame.\nThe camera moves slightly to the left. A young woman in a tight black gymnastics suit with rose-red shoulder straps is practicing gymnastics moves. The woman's golden hair is combed into a smooth bun, and is decorated with a rose-red rubber band. The woman first bends down, then quickly jumps up and flips her body in the air. After the woman completes the flip in the air, she lands steadily on the mat. After standing firmly, the woman raises her hands again and does a split jump in the air. Finally, after the woman completes a series of gymnastics moves, she raises her hands above her head again. The man on the right side of the screen smiles and pays attention to the woman's movements.", "events": [ { "event": "The video begins with the camera focused on an indoor gymnastics training area. A middle-aged Asian man in a dark blue sportswear with white stripes and logos stands in the center of the frame. The man has his hands on his hips and is looking to the right of the frame. On the right side of the frame, a pair of slender legs are on a yellow gymnastics mat in the foreground. In the background, a blue locker is filled with items of various colors. The walls of the room are white. There is some light coming in from the window on the left side of the frame. 
There are also some framed decorations hanging on the wall on the right side of the frame.", "visual_elements": [ { "content": "The camera is pointed at an indoor gymnastics training field.", "type": "camera", "weight": 3 }, { "content": "A middle-aged Asian man wearing a dark blue sportswear with white stripes and logos stands in the center of the screen.", "type": "attribute", "weight": 3 }, { "content": "The man has his hands on his hips and looks to the right of the screen.", "type": "action", "weight": 3 }, { "content": "On the right side of the screen, there is a pair of slender legs on the yellow gymnastics mat in the foreground of the camera.", "type": "attribute", "weight": 2 }, { "content": "The blue locker in the background is filled with items of various colors.", "type": "scene", "weight": 2 }, { "content": "The walls of the room are white.", "type": "scene", "weight": 2 }, { "content": "Some light comes in from the window on the left side of the screen.", "type": "scene", "weight": 1 }, { "content": "There are also some frame decorations hanging on the wall on the right side of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves slightly to the left. A young woman in a tight black gymnastics suit with rose-red shoulder straps is practicing gymnastics moves. The woman's golden hair is combed into a smooth bun, and is decorated with a rose-red rubber band. The woman first bends down, then quickly jumps up and flips her body in the air. After the woman completes the flip in the air, she lands steadily on the mat. After standing firmly, the woman raises her hands again and does a split jump in the air. Finally, after the woman completes a series of gymnastics moves, she raises her hands above her head again. The man on the right side of the screen smiles and pays attention to the woman's movements.", "visual_elements": [ { "content": "The camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "A young woman wearing a tight black gymnastics suit with rose-red shoulder straps is practicing gymnastics moves.", "type": "attribute", "weight": 3 }, { "content": "The woman's golden hair is combed into a smooth bun and decorated with a rose-red rubber band.", "type": "attribute", "weight": 1 }, { "content": "The woman first bends down, then quickly jumps up and flips in the air.", "type": "action", "weight": 3 }, { "content": "After the woman completes the flip in the air, she lands steadily on the mat.", "type": "action", "weight": 3 }, { "content": "After standing firm, the woman raises her hands again and does a split jump in the air.", "type": "action", "weight": 2 }, { "content": "Finally, after the woman completes a series of gymnastics moves, she raises her hands above her head again.", "type": "action", "weight": 1 }, { "content": "The man on the right side of the screen smiles and pays attention to the woman's movements.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0823", "video_path": "VIDGEN-1M/eRLSuDJtGzg-Scene-0055.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 16.0, "resolution": { "width": 1280, "height": 720 }, "caption": "The camera is shooting down a football field. Two teams are playing on the green football field. One team is wearing red jerseys and the other team is wearing black jerseys. A referee wearing a yellow jersey stands in the middle of the field. 
The player wearing a red jersey is controlling the ball. Suddenly, he is tripped by the opponent and falls to the ground. Then the player wearing a black jersey grabs the ball and passes it to his teammates. After passing the ball, the teammates run to the left side of the screen. The player wearing a red jersey follows closely. Then a player wearing a white jersey runs out from the left side of the screen. He seizes the opportunity and successfully kicks the ball into the goal. Throughout the process, the camera follows the trajectory of the ball. In the background, the stands around the field are full of spectators. Under the stands, the field is surrounded by red and blue billboards with words such as \"GIRONA FC\", \"activa\" and \"CaixaBank\" displayed on the billboards.", "events": [ { "event": "The camera is shooting down a football field. Two teams are playing on the green football field. One team is wearing red jerseys and the other team is wearing black jerseys. A referee wearing a yellow jersey stands in the middle of the field. The player wearing a red jersey is controlling the ball. Suddenly, he is tripped by the opponent and falls to the ground. Then the player wearing a black jersey grabs the ball and passes it to his teammates. After passing the ball, the teammates run to the left side of the screen. The player wearing a red jersey follows closely. Then a player wearing a white jersey runs out from the left side of the screen. He seizes the opportunity and successfully kicks the ball into the goal. Throughout the process, the camera follows the trajectory of the ball. In the background, the stands around the field are full of spectators. Under the stands, the field is surrounded by red and blue billboards with words such as \"GIRONA FC\", \"activa\" and \"CaixaBank\" displayed on the billboards.", "visual_elements": [ { "content": "The camera is shooting down a football field.", "type": "camera", "weight": 3 }, { "content": "Two teams are playing on the green football field.", "type": "attribute", "weight": 2 }, { "content": "One team is wearing red jerseys, the other team is wearing black jerseys.", "type": "attribute", "weight": 2 }, { "content": "A referee wearing a yellow jersey is standing in the middle of the field.", "type": "attribute", "weight": 2 }, { "content": "The player wearing a red jersey is controlling the ball.", "type": "action", "weight": 3 }, { "content": "Suddenly, the player wearing a red jersey is tripped by the opponent.", "type": "action", "weight": 3 }, { "content": "Then the player wearing a black jersey grabs the ball and passes it to his teammate.", "type": "action", "weight": 3 }, { "content": "The teammates pass the ball and run to the left side of the screen.", "type": "action", "weight": 3 }, { "content": "The player wearing a red jersey follows closely.", "type": "action", "weight": 2 }, { "content": "Then a player wearing a white jersey runs out from the left side of the screen.", "type": "action", "weight": 3 }, { "content": "The player wearing a white jersey seizes the opportunity and successfully kicks the ball into the goal.", "type": "action", "weight": 3 }, { "content": "The camera follows the ball throughout the process.", "type": "camera", "weight": 3 }, { "content": "The stands around the field in the background are full of spectators.", "type": "scene", "weight": 2 }, { "content": "The field under the stands is surrounded by red and blue billboards.", "type": "attribute", "weight": 2 }, { "content": "The billboards display words 
such as \"GIRONA FC\", \"activa\" and \"CaixaBank\".", "type": "attribute", "weight": 1 } ] } ], "n_events": 1, "n_elements": 15 }, { "index": "TUNA_0824", "video_path": "VIDGEN-1M/feGrcygzypw-Scene-0057.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 8.81, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a brown horse walking on a sandy field. The horse is equipped with a black saddle and reins. On the right side of the screen is a young woman wearing tight jeans and a sleeveless top. The woman is holding the reins and is guiding the horse to the right side of the screen. The background is an open sand field.\nThe camera follows the movement of the horse and stays on the side of the horse. The female features gradually emerge, and it is a young white woman with red hair. At the end of the video, the horse runs to the right side of the screen, leaving behind its tail swinging. The woman keeps a certain distance from the horse, on the left side of the screen. There are several horizontal wooden poles on the ground with colored markings on them. The green vegetation around the field gradually emerges.", "events": [ { "event": "At the beginning of the video, the camera is focused on a brown horse walking on a sandy field. The horse is equipped with a black saddle and reins. On the right side of the screen is a young woman wearing tight jeans and a sleeveless top. The woman is holding the reins and is guiding the horse to the right side of the screen. The background is an open sand field.", "visual_elements": [ { "content": "The camera is focused on a brown horse walking on a sand field.", "type": "camera", "weight": 3 }, { "content": "The horse is equipped with a black saddle and reins.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the screen is a young woman wearing tight jeans and a sleeveless top.", "type": "attribute", "weight": 3 }, { "content": "The woman is holding the reins and guiding the horse to the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The background is an open sand field.", "type": "scene", "weight": 3 } ] }, { "event": "The camera follows the movement of the horse and stays on the side of the horse. The female features gradually emerge, and it is a young white woman with red hair. At the end of the video, the horse runs to the right side of the screen, leaving behind its tail swinging. The woman keeps a certain distance from the horse, on the left side of the screen. There are several horizontal wooden poles on the ground with colored markings on them. 
The green vegetation around the field gradually emerges.", "visual_elements": [ { "content": "The camera follows the movement of the horse.", "type": "camera", "weight": 3 }, { "content": "The camera remains on the side of the horse.", "type": "camera", "weight": 2 }, { "content": "The woman's features gradually become clear; she is a young white woman with red hair.", "type": "attribute", "weight": 3 }, { "content": "At the end of the video, the horse runs to the right side of the screen.", "type": "action", "weight": 2 }, { "content": "The horse's tail swings behind it as it runs.", "type": "action", "weight": 1 }, { "content": "The woman is on the left side of the screen, keeping a certain distance from the horse.", "type": "attribute", "weight": 2 }, { "content": "There are several horizontal wooden poles on the ground.", "type": "scene", "weight": 2 }, { "content": "There are colored markings on the wooden poles.", "type": "scene", "weight": 1 }, { "content": "The green vegetation around the field gradually emerges.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0825", "video_path": "VIDGEN-1M/frf9EZVGlXs-Scene-0088.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 14.6, "resolution": { "width": 1280, "height": 720 }, "caption": "The video shows a boxing match at the 2014 Glasgow Commonwealth Games. The match takes place in a boxing ring surrounded by white ropes. The floor of the boxing ring is blue and has the words \"GLASGOW 2014 XX COMMONWEALTH GAMES\" printed on it. There are white pads with logos printed on them at the four corners of the boxing ring. There are spectators sitting around the boxing ring. There are some advertising billboards of different colors in the background.\nAt the beginning of the video, two young white male boxers are fighting fiercely. The boxer on the left is wearing a red boxing suit with the words \"NIR\" printed on the back and green boxing shoes. The boxer on the right is wearing a blue boxing suit with the words \"NZL\" printed on the back and white boxing shoes. Both of them have white letters and stripes on their clothes, and they are wearing boxing gloves of corresponding colors. The middle-aged male referee on the field is wearing a white short-sleeved shirt and black pants. The blue boxer keeps attacking the red boxer, while the red boxer flexibly dodges and looks for opportunities to counterattack. The referee moves back and forth between the two, keeping a close eye on the game. In the last few seconds of the video, the referee signals the two boxers to stop the game. The camera follows the two people all the time and keeps shaking left and right.", "events": [ { "event": "The video shows a boxing match at the 2014 Glasgow Commonwealth Games. The match takes place in a boxing ring surrounded by white ropes. The floor of the boxing ring is blue and has the words \"GLASGOW 2014 XX COMMONWEALTH GAMES\" printed on it. There are white pads with logos printed on them at the four corners of the boxing ring. There are spectators sitting around the boxing ring. 
There are some advertising billboards of different colors in the background.", "visual_elements": [ { "content": "The video shows a boxing match at the 2014 Commonwealth Games in Glasgow.", "type": "camera", "weight": 3 }, { "content": "The match takes place in a boxing ring surrounded by white boxing ropes.", "type": "scene", "weight": 3 }, { "content": "The floor of the boxing ring is blue with the words \"GLASGOW 2014 XX COMMONWEALTH GAMES\" printed on it.", "type": "scene", "weight": 1 }, { "content": "There are white pads with logos printed on the four corners of the boxing ring.", "type": "scene", "weight": 1 }, { "content": "The boxing ring is full of spectators.", "type": "scene", "weight": 2 }, { "content": "There are some billboards of different colors in the background.", "type": "scene", "weight": 1 } ] }, { "event": "At the beginning of the video, two young white male boxers are fighting fiercely. The boxer on the left is wearing a red boxing suit with the words \"NIR\" printed on the back and green boxing shoes. The boxer on the right is wearing a blue boxing suit with the words \"NZL\" printed on the back and white boxing shoes. Both of them have white letters and stripes on their clothes, and they are wearing boxing gloves of corresponding colors. The middle-aged male referee on the field is wearing a white short-sleeved shirt and black pants. The blue boxer keeps attacking the red boxer, while the red boxer flexibly dodges and looks for opportunities to counterattack. The referee moves back and forth between the two, keeping a close eye on the game. In the last few seconds of the video, the referee signals the two boxers to stop the game. The camera follows the two people all the time and keeps shaking left and right.", "visual_elements": [ { "content": "At the beginning of the video, two young white male boxers are fighting fiercely.", "type": "action", "weight": 3 }, { "content": "The boxer on the left is wearing a red boxing suit with the words \"NIR\" printed on the back and green boxing shoes.", "type": "attribute", "weight": 3 }, { "content": "The boxer on the right is wearing a blue boxing suit with the words \"NZL\" printed on the back and white boxing shoes.", "type": "attribute", "weight": 3 }, { "content": "Both boxers have white letters and stripes on their clothes and wear boxing gloves of corresponding colors on their hands.", "type": "attribute", "weight": 1 }, { "content": "The middle-aged male referee on the field is wearing a white short-sleeved shirt and black pants.", "type": "attribute", "weight": 2 }, { "content": "The blue boxer keeps attacking the red boxer.", "type": "action", "weight": 3 }, { "content": "The red boxer evades and looks for opportunities to counterattack.", "type": "action", "weight": 3 }, { "content": "The referee moves back and forth between the two and keeps watching the game.", "type": "action", "weight": 2 }, { "content": "In the last few seconds of the video, the referee signals the two boxers to stop the game.", "type": "action", "weight": 2 }, { "content": "The camera follows the two and keeps shaking left and right.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0826", "video_path": "VIDGEN-1M/g412UF0eo4E-Scene-0045.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 13.31, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a Caucasian girl and a dog in a warmly decorated room. 
The girl has blonde hair that falls over her shoulders. The girl is wearing a rainbow-colored hooded nightgown and stands by the bed. The dog is a fluffy light brown dog standing on the bed. The room has pink walls and several decorative paintings, one of which is a picture of a unicorn. The bed is covered with rainbow-colored and unicorn-patterned sheets and pillows, and the overall tone is very dreamy. There is a table lamp with a black and white striped lampshade on the bedside table.\nThen, the camera moves slightly to the right. The girl takes her phone off the windowsill and turns around to unplug the charger. The dog stands on the bed and watches the girl's movements. A green Christmas tree with white decorations appears on the right side of the screen.\nThen, the camera moves to the left. The dog turns around and wags its tail towards the camera, and the girl lowers her head and swipes on her phone. The bedside table in the background is white, with white snowmen and pink and green bird decorations on it.\nFinally, the camera moves in and out, focusing on the puppy. The girl takes a few steps to her right. The puppy sticks out its tongue to sniff the camera. As the camera zooms out, the puppy lowers its head and sniffs the bed.", "events": [ { "event": "The video begins with a Caucasian girl and a dog in a warmly decorated room. The girl has blonde hair that falls over her shoulders. The girl is wearing a rainbow-colored hooded nightgown and stands by the bed. The dog is a fluffy light brown dog standing on the bed. The room has pink walls and several decorative paintings, one of which is a picture of a unicorn. The bed is covered with rainbow-colored and unicorn-patterned sheets and pillows, and the overall tone is very dreamy. There is a table lamp with a black and white striped lampshade on the bedside table.", "visual_elements": [ { "content": "The camera is focused on a little white girl and a dog in a warmly decorated room.", "type": "camera", "weight": 3 }, { "content": "The little girl has blonde hair that falls over her shoulders.", "type": "attribute", "weight": 2 }, { "content": "The girl is wearing a rainbow-colored hooded nightgown.", "type": "attribute", "weight": 3 }, { "content": "The little girl is standing by the bed.", "type": "action", "weight": 3 }, { "content": "The dog has fluffy light brown hair.", "type": "attribute", "weight": 3 }, { "content": "The dog is standing on the bed.", "type": "action", "weight": 2 }, { "content": "The walls of the room are pink.", "type": "scene", "weight": 3 }, { "content": "There are several decorative paintings on the wall, one of which is a picture of a unicorn.", "type": "scene", "weight": 2 }, { "content": "The bed is covered with rainbow-colored and unicorn-patterned sheets and pillows.", "type": "scene", "weight": 3 }, { "content": "There is a lamp with a black and white striped lampshade on the bedside table.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves slightly to the right. The girl takes her phone off the windowsill and turns around to unplug the charger. The dog stands on the bed and watches the girl's movements. 
A green Christmas tree with white decorations appears on the right side of the screen.", "visual_elements": [ { "content": "Then, the camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The girl takes her phone off the windowsill and turns around to unplug the charger.", "type": "action", "weight": 3 }, { "content": "The dog stands on the bed and watches the girl's actions.", "type": "action", "weight": 2 }, { "content": "A green Christmas tree appears on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "The tree is hung with white ornaments.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves to the left. The dog turns around and wags its tail towards the camera, and the girl lowers her head and swipes on her phone. The bedside table in the background is white, with white snowmen and pink and green bird decorations on it.", "visual_elements": [ { "content": "Then, the camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The dog turns around and wags its tail as it walks toward the camera.", "type": "action", "weight": 3 }, { "content": "The girl lowers her head and swipes on her phone.", "type": "action", "weight": 2 }, { "content": "The bedside table is white.", "type": "scene", "weight": 1 }, { "content": "On the bedside table are white snowmen and pink and green bird ornaments.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera moves in and out, focusing on the puppy. The girl takes a few steps to her right. The puppy sticks out its tongue to sniff the camera. As the camera zooms out, the puppy lowers its head and sniffs the bed.", "visual_elements": [ { "content": "Finally, the camera is aimed at the puppy.", "type": "camera", "weight": 3 }, { "content": "The camera moves in and out.", "type": "camera", "weight": 3 }, { "content": "The girl takes a few steps to her right.", "type": "action", "weight": 2 }, { "content": "The puppy first sticks out its tongue to sniff the camera.", "type": "action", "weight": 3 }, { "content": "After the camera is out, the puppy lowers its head and sniffs the bed.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0827", "video_path": "VIDGEN-1M/g7IKm-ChwAw-Scene-0120.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 23.22, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a young white male riding a small BMX bicycle. The man is wearing a black T-shirt and camouflage pants, a black helmet with red and white patterns, and tattoos on his arms. The frame of the bicycle has white and black patterns. In the background is an open skate park with a concrete floor. There are various skateboarding facilities around the park, including ramps, railings and steps. There are some cyclists on the left side of the picture. The park is surrounded by palm trees, and there are some buildings and billboards in the distance.\nThe camera moves to the right following the man's movements. The camera captures the man's movements from the side. The man approaches a blue railing on his bicycle, then jumps up and uses the front and rear wheels of the bicycle to cross the steps in the middle of the railing. The man lands smoothly on the ground and continues to move forward. Then, the man jumps up again and crosses a slope in the middle of the red railing. 
Then, the man rides to the steps under a blue curved railing, jumps up and uses the front and rear wheels of the bicycle to cross the steps. The man continues to ride, jumps up and crosses a metal railing on a black and white checkerboard ground. After spinning in the air, the man lands safely and continues to ride to the right side of the screen.\nThen, the camera moves to the right and moves forward towards the man. The man rides to another blue staircase, jumps up and crosses the staircase. The bicycle and the man spin in the air, and after landing, they face the left side of the screen. Then, the man rides the bicycle backward and goes up a slope on the right side of the screen. The man bounces the bicycle up on the slope and spins the handlebars of the bicycle before steadily going down the slope. There are many people on the slope watching the man's performance. In the distance, there is a row of houses on the left side of the screen.\nFinally, the camera follows the man from behind and films his movements. The man rides to the front left of the screen, and turns to the right at the left edge of the screen. There are white letters printed on the black fence on the left side of the screen.", "events": [ { "event": "At the beginning of the video, the camera focuses on a young white male riding a small BMX bicycle. The man is wearing a black T-shirt and camouflage pants, a black helmet with red and white patterns, and tattoos on his arms. The frame of the bicycle has white and black patterns. In the background is an open skate park with a concrete floor. There are various skateboarding facilities around the park, including ramps, railings and steps. There are some cyclists on the left side of the picture. The park is surrounded by palm trees, and there are some buildings and billboards in the distance.", "visual_elements": [ { "content": "The camera focuses on a young white male riding a small BMX bike.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a black T-shirt and camouflage pants, a black helmet with a red and white pattern, and tattoos on his arms.", "type": "attribute", "weight": 3 }, { "content": "The frame of the bike has white and black patterns.", "type": "attribute", "weight": 1 }, { "content": "There are some cyclists on the left side of the screen.", "type": "scene", "weight": 2 }, { "content": "In the background is an open skate park with a concrete floor.", "type": "scene", "weight": 3 }, { "content": "There are various skateboarding facilities around the park, including ramps, railings and steps.", "type": "attribute", "weight": 2 }, { "content": "The park is surrounded by palm trees, and there are some buildings and billboards in the distance.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera moves to the right following the man's movements. The camera captures the man's movements from the side. The man approaches a blue railing on his bicycle, then jumps up and uses the front and rear wheels of the bicycle to cross the steps in the middle of the railing. The man lands smoothly on the ground and continues to move forward. Then, the man jumps up again and crosses a slope in the middle of the red railing. Then, the man rides to the steps under a blue curved railing, jumps up and uses the front and rear wheels of the bicycle to cross the steps. The man continues to ride, jumps up and crosses a metal railing on a black and white checkerboard ground. 
After spinning in the air, the man lands safely and continues to ride to the right side of the screen.", "visual_elements": [ { "content": "The camera moves to the right following the man's movements.", "type": "camera", "weight": 3 }, { "content": "The camera captures the man's movements from the side.", "type": "camera", "weight": 2 }, { "content": "The man rides his bike to a blue railing, then jumps up and uses the front and rear wheels of the bike to cross the steps in the middle of the railing.", "type": "action", "weight": 3 }, { "content": "The man lands smoothly on the ground and continues to move forward.", "type": "action", "weight": 3 }, { "content": "Then, the man jumps up again and crosses a slope in the middle of the red railing.", "type": "action", "weight": 3 }, { "content": "Then, the man rides to the steps under a blue curved railing, jumps up and uses the front and rear wheels of the bike to cross the steps.", "type": "action", "weight": 3 }, { "content": "The man continues to ride, jumping up and crossing a metal railing on a black and white checkerboard ground.", "type": "action", "weight": 3 }, { "content": "After a circle in the air, the man lands safely and continues to ride to the right side of the screen.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera moves to the right and moves forward towards the man. The man rides to another blue staircase, jumps up and crosses the staircase. The bicycle and the man spin in the air, and after landing, they face the left side of the screen. Then, the man rides the bicycle backward and goes up a slope on the right side of the screen. The man bounces the bicycle up on the slope and spins the handlebars of the bicycle before steadily going down the slope. There are many people on the slope watching the man's performance. In the distance, there is a row of houses on the left side of the screen.", "visual_elements": [ { "content": "Then, the camera moves to the right and advances toward the man.", "type": "camera", "weight": 3 }, { "content": "The man rides to another blue staircase, jumps up and crosses the staircase.", "type": "action", "weight": 2 }, { "content": "The bicycle and the man rotate in the air and land towards the left side of the screen.", "type": "action", "weight": 3 }, { "content": "Then, the man rides the bicycle backward and goes up a slope on the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The man bounces the bicycle up on the slope and rotates the handlebars of the bicycle before steadily going down the slope.", "type": "action", "weight": 2 }, { "content": "There are many people watching the man's performance on the slope.", "type": "scene", "weight": 2 }, { "content": "In the distance, there is a row of houses on the left side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera follows the man from behind and films his movements. The man rides to the front left of the screen, and turns to the right at the left edge of the screen. 
There are white letters printed on the black fence on the left side of the screen.", "visual_elements": [ { "content": "Finally, the camera follows the man from behind and shoots his movements.", "type": "camera", "weight": 3 }, { "content": "The man rides to the front left of the screen, and turns to the right side of the screen at the left edge of the screen.", "type": "action", "weight": 2 }, { "content": "There is a white letter pattern printed on the black fence on the left side of the screen.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0828", "video_path": "VIDGEN-1M/gNahM5kJhmk-Scene-0066.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 11.55, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a hand. This hand is holding a can of Spindrift sparkling water. There are red raspberry patterns and text descriptions printed on the can. The background is a blurry indoor environment. The floor is wooden, and some furniture and decorations can be seen in the distance.\nThe camera moves slightly to the right, and the can in the hand also rotates accordingly, showing different sides of the can. The can gets closer to the camera, revealing the upper part. The camera is focused on the nutrition facts list on the can. The list of ingredients includes carbonated water, raspberry juice, cranberry juice, raspberry puree and lemon juice, etc.", "events": [ { "event": "At the beginning of the video, the camera is focused on a hand. This hand is holding a can of Spindrift sparkling water. There are red raspberry patterns and text descriptions printed on the can. The background is a blurry indoor environment. The floor is wooden, and some furniture and decorations can be seen in the distance.", "visual_elements": [ { "content": "The camera is focused on a hand.", "type": "camera", "weight": 3 }, { "content": "This hand is holding a can of Spindrift sparkling water.", "type": "attribute", "weight": 3 }, { "content": "There are red raspberry patterns and text descriptions printed on the can.", "type": "attribute", "weight": 2 }, { "content": "The background is a blurry indoor environment.", "type": "scene", "weight": 1 }, { "content": "The floor is wooden.", "type": "attribute", "weight": 1 }, { "content": "Some furniture and decorations can be seen in the distance.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves slightly to the right, and the can in the hand also rotates accordingly, showing different sides of the can. The can gets closer to the camera, revealing the upper part. The camera is focused on the nutrition facts list on the can. 
The list of ingredients includes carbonated water, raspberry juice, cranberry juice, raspberry puree and lemon juice, etc.", "visual_elements": [ { "content": "The camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The can in the hand also rotates accordingly.", "type": "attribute", "weight": 3 }, { "content": "The scene shows different sides of the can.", "type": "attribute", "weight": 2 }, { "content": "The can gets closer to the camera.", "type": "camera", "weight": 3 }, { "content": "The upper part of the can is revealed.", "type": "attribute", "weight": 1 }, { "content": "The camera is focused on the nutrition facts list on the can.", "type": "camera", "weight": 3 }, { "content": "The list of ingredients includes carbonated water, raspberry juice, cranberry juice, raspberry puree and lemon juice, etc.", "type": "attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0829", "video_path": "VIDGEN-1M/hILWWNHVdDo-Scene-0020.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 5.54, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is aimed at the basketball court. Two teams are competing fiercely. One team is wearing yellow jerseys and the other team is wearing white jerseys. The background is a full stand. The court is composed of a yellow area and a blue three-point area. The end of the blue three-point area has the words \"ACC\". There are referee seats and substitutes next to the court. The substitutes sitting on the bench facing the camera are players wearing white jerseys.\nIn the picture, a player wearing a white jersey is attacking with the ball. Behind him, a player wearing a yellow jersey is blocking him. The camera zooms in. The player wearing a white jersey moves quickly towards the basket. He jumps up and successfully dunks. At this time, the audience, referees and substitutes wearing white jerseys in the background stand up and applaud. There is a time display on the pillar behind the basketball hoop in the background.", "events": [ { "event": "At the beginning of the video, the camera is aimed at the basketball court. Two teams are competing fiercely. One team is wearing yellow jerseys and the other team is wearing white jerseys. The background is a full stand. The court is composed of a yellow area and a blue three-point area. The end of the blue three-point area has the words \"ACC\". There are referee seats and substitutes next to the court. 
The substitutes sitting on the bench facing the camera are players wearing white jerseys.", "visual_elements": [ { "content": "The camera is aimed at the basketball court.", "type": "camera", "weight": 3 }, { "content": "Two teams are competing fiercely on the court.", "type": "attribute", "weight": 2 }, { "content": "One team is wearing yellow jerseys, and the other team is wearing white jerseys.", "type": "attribute", "weight": 2 }, { "content": "The background is a bleacher full of spectators.", "type": "scene", "weight": 2 }, { "content": "The court is composed of a yellow area and a blue three-point area.", "type": "attribute", "weight": 2 }, { "content": "The word \"ACC\" is written at the end of the blue three-point area.", "type": "attribute", "weight": 2 }, { "content": "There are referee seats and substitutes' seats next to the court.", "type": "attribute", "weight": 2 }, { "content": "The substitute player in a white jersey is sitting on the substitute bench facing the camera.", "type": "attribute", "weight": 2 } ] }, { "event": "In the picture, a player wearing a white jersey is attacking with the ball. Behind him, a player wearing a yellow jersey is blocking him. The camera zooms in. The player wearing a white jersey moves quickly towards the basket. He jumps up and successfully dunks. At this time, the audience, referees and substitutes wearing white jerseys in the background stand up and applaud. There is a time display on the pillar behind the basketball hoop in the background.", "visual_elements": [ { "content": "In the picture, a player in a white jersey is attacking with the ball.", "type": "action", "weight": 3 }, { "content": "Behind the player in the white jersey holding the ball, there is a player in a yellow jersey blocking him.", "type": "action", "weight": 3 }, { "content": "The camera zooms in.", "type": "camera", "weight": 3 }, { "content": "The player in the white jersey moves quickly towards the basket.", "type": "action", "weight": 3 }, { "content": "The player in the white jersey jumps up and successfully dunks.", "type": "action", "weight": 3 }, { "content": "At this time, the spectators, referees and substitutes in white jerseys in the background stand up and applaud.", "type": "action", "weight": 3 }, { "content": "There is a time display on the pillar behind the basketball hoop in the background.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0830", "video_path": "VIDGEN-1M/hO4ROM3HzCM-Scene-0010.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 20.17, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on an adult guinea pig and two guinea pig pups. The adult guinea pigs have brown, white and black fur, while the pups have mainly black and white fur. The adult guinea pig is eating green leaves, and a pair of hands is gently holding it on the left side of the screen. The pups are below the adult guinea pigs. There are tree trunks and some green plants in the background.\nThe camera moves slightly to the left. The camera focuses on the two pups. The pup on the right side of the screen crawls out from under the adult guinea pig. The pup on the left side of the screen looks forward. The hand holding the adult guinea pig wears a black watch.\nThe camera moves slightly to the right and focuses on the adult guinea pig. The adult guinea pig's head reenters the frame. 
The adult guinea pig is eating green leaves. A hand with black nails appears on the right side of the screen.\nFinally, the camera moves to the left again and zooms in slightly, focusing on the pups. The pups look around, and the adult guinea pigs above them suddenly move back and squeeze the pups. The hand on the right side of the screen disappears. At the end of the video, the camera zooms out slightly.", "events": [ { "event": "The video begins with the camera focusing on an adult guinea pig and two guinea pig pups. The adult guinea pigs have brown, white and black fur, while the pups have mainly black and white fur. The adult guinea pig is eating green leaves, and a pair of hands is gently holding it on the left side of the screen. The pups are below the adult guinea pigs. There are tree trunks and some green plants in the background.", "visual_elements": [ { "content": "The camera is focused on an adult guinea pig and two baby guinea pigs.", "type": "camera", "weight": 3 }, { "content": "The adult guinea pig has brown, white and black fur.", "type": "attribute", "weight": 3 }, { "content": "The baby guinea pig has mainly black and white fur.", "type": "attribute", "weight": 2 }, { "content": "The adult guinea pig is eating green leaves.", "type": "action", "weight": 3 }, { "content": "A pair of hands is gently holding the adult guinea pig on the left side of the screen.", "type": "attribute", "weight": 2 }, { "content": "The baby guinea pig is under the adult guinea pig.", "type": "attribute", "weight": 2 }, { "content": "There are tree trunks and some green plants in the background.", "type": "scene", "weight": 3 } ] }, { "event": "The camera moves slightly to the left. The camera focuses on the two pups. The pup on the right side of the screen crawls out from under the adult guinea pig. The pup on the left side of the screen looks forward. The hand holding the adult guinea pig wears a black watch.", "visual_elements": [ { "content": "The camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the two baby guinea pigs.", "type": "camera", "weight": 3 }, { "content": "The baby guinea pig on the right side of the screen crawls out from under the adult guinea pig.", "type": "action", "weight": 3 }, { "content": "The baby guinea pig on the left side of the screen looks forward.", "type": "action", "weight": 3 }, { "content": "The hand holding the adult guinea pig wears a black watch.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera moves slightly to the right and focuses on the adult guinea pig. The adult guinea pig's head reenters the frame. The adult guinea pig is eating green leaves. A hand with black nails appears on the right side of the screen.", "visual_elements": [ { "content": "The camera moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the adult guinea pig.", "type": "camera", "weight": 3 }, { "content": "The adult guinea pig's head re-enters the picture.", "type": "attribute", "weight": 2 }, { "content": "The adult guinea pig is eating green leaves.", "type": "action", "weight": 3 }, { "content": "A hand with black nails appears on the right side of the screen.", "type": "attribute", "weight": 1 } ] }, { "event": "Finally, the camera moves to the left again and zooms in slightly, focusing on the pups. The pups look around, and the adult guinea pigs above them suddenly move back and squeeze the pups. The hand on the right side of the screen disappears. 
At the end of the video, the camera zooms out slightly.", "visual_elements": [ { "content": "Finally, the camera moves to the left again and zooms in slightly.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the baby guinea pig again.", "type": "camera", "weight": 2 }, { "content": "The baby guinea pig looks around.", "type": "action", "weight": 2 }, { "content": "The adult guinea pig above suddenly moves back and squeezes the baby guinea pig.", "type": "action", "weight": 1 }, { "content": "The hand on the right side of the screen disappears.", "type": "attribute", "weight": 1 }, { "content": "At the end of the video, the camera zooms out slightly.", "type": "camera", "weight": 1 } ] } ], "n_events": 4, "n_elements": 23 }, { "index": "TUNA_0831", "video_path": "VIDGEN-1M/h_pYy_b9HYc-Scene-0012.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 16.52, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on the back seat area of a white sedan. The door of the car is open and the camera shoots from the outside of the car to the inside. A sheet of vehicle information is taped to the window and other vehicles can be seen parked in the background.\nThe camera moves slowly to the left, gradually approaching the interior of the car, where the seats come with distinctive stitching and contour strips, and seat belts are neatly placed on either side of the seats. The black trim on the inside of the doors and the door handles also appear in the shot.\nNext, the camera moves to the right, where the front seats are also wrapped in red leather with black trim panels on the back of the seats and the front steering wheel can be seen.\nFinally, the camera moves back and gradually exits the car, this time with a slightly downward view and black carpeting under the seats in the frame.", "events": [ { "event": "The video begins with the camera focusing on the back seat area of a white sedan. The door of the car is open and the camera shoots from the outside of the car to the inside. A sheet of vehicle information is taped to the window and other vehicles can be seen parked in the background.", "visual_elements": [ { "content": "The camera focuses on the back seat area of a white car.", "type": "camera", "weight": 3 }, { "content": "The door is open.", "type": "attribute", "weight": 2 }, { "content": "The camera looks in from the outside of the car.", "type": "camera", "weight": 3 }, { "content": "A sheet of vehicle information is taped to the window of the car.", "type": "attribute", "weight": 2 }, { "content": "Other parked cars can be seen in the background.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves slowly to the left, gradually approaching the interior of the car, where the seats come with distinctive stitching and contour strips, and seat belts are neatly placed on either side of the seats. 
The black trim on the inside of the doors and the door handles also appear in the shot.", "visual_elements": [ { "content": "The camera slowly moves to the left.", "type": "camera", "weight": 3 }, { "content": "The camera gradually approaches the interior of the car.", "type": "camera", "weight": 3 }, { "content": "The seats have visible stitching and contour lines.", "type": "attribute", "weight": 2 }, { "content": "Seat belts are neatly placed on both sides of the seats.", "type": "attribute", "weight": 2 }, { "content": "The black trim on the inside of the doors and the door handles are also seen in the shot.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera moves to the right, where the front seats are also wrapped in red leather with black trim panels on the back of the seats and the front steering wheel can be seen.", "visual_elements": [ { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The front seats are also wrapped in red leather.", "type": "attribute", "weight": 2 }, { "content": "The backs of the seats have a black veneer.", "type": "attribute", "weight": 2 }, { "content": "The front steering wheel can be seen.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the camera moves back and gradually exits the car, this time with a slightly downward view and black carpeting under the seats in the frame.", "visual_elements": [ { "content": "The camera moves backward.", "type": "camera", "weight": 3 }, { "content": "The camera gradually exits the car.", "type": "camera", "weight": 3 }, { "content": "The angle of view is slightly downward.", "type": "camera", "weight": 3 }, { "content": "Black carpet underneath the seats is in the frame", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0832", "video_path": "VIDGEN-1M/hkvcg8-6JjU-Scene-0021.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 6.61, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a green motorcycle, which is parked on a concrete patch.\nStarting at the rear of the motorcycle, the camera pulls away from the motorcycle, which has a green body with smooth body lines, a large and wide rear tire, and a relatively small exhaust pipe. The motorcycle also has some chrome trim on the body and the seat is black.\nAs the camera pulls away, several parked cars can be seen in the background, one of which is a red sports car and the other is a blue sedan. There are some oil and stains on the ground.", "events": [ { "event": "The video begins with the camera focusing on a green motorcycle, which is parked on a concrete patch.", "visual_elements": [ { "content": "The camera focuses on a green motorcycle.", "type": "camera", "weight": 3 }, { "content": "The motorcycle is parked on a concrete floor.", "type": "attribute", "weight": 3 } ] }, { "event": "Starting at the rear of the motorcycle, the camera pulls away from the motorcycle, which has a green body with smooth body lines, a large and wide rear tire, and a relatively small exhaust pipe. 
The motorcycle also has some chrome trim on the body and the seat is black.", "visual_elements": [ { "content": "The camera pulls away.", "type": "camera", "weight": 3 }, { "content": "The motorcycle's body is green.", "type": "attribute", "weight": 3 }, { "content": "The lines of the body are smooth.", "type": "attribute", "weight": 2 }, { "content": "The rear tire is large and wide.", "type": "attribute", "weight": 2 }, { "content": "The exhaust pipe is small.", "type": "attribute", "weight": 2 }, { "content": "The motorcycle has some chrome trim on the body.", "type": "attribute", "weight": 1 }, { "content": "The seat is black.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera pulls away, several parked cars can be seen in the background, one of which is a red sports car and the other is a blue sedan. There are some oil and stains on the ground.", "visual_elements": [ { "content": "The camera is zooming out.", "type": "camera", "weight": 3 }, { "content": "Several parked cars can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "One of them is a red sports car.", "type": "attribute", "weight": 2 }, { "content": "The other is a blue sedan.", "type": "attribute", "weight": 2 }, { "content": "There are some oil and stains on the ground.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0833", "video_path": "VIDGEN-1M/i1k5OGMPjYw-Scene-0066.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 12.47, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a young man. The man is facing the camera, standing on an escalator running upwards. The man has a black beard, wears a white T-shirt and light-colored pants, and carries a black backpack. The man's right arm is holding the escalator handrail, and there is a clear tattoo on his right arm. In the background, on the left side of the escalator is another escalator running downwards, with metal handrails on both sides of the escalator. On the right side are some shops, and the shop signs have Vietnamese logos.\nThe camera follows the man on the escalator. He gestures with his left hand as he talks. People keep passing by in the background. At the end of the video, a short-haired woman in black walks onto the escalator next to him.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young man. The man is facing the camera, standing on an escalator running upwards. The man has a black beard, wears a white T-shirt and light-colored pants, and carries a black backpack. The man's right arm is holding the escalator handrail, and there is a clear tattoo on his right arm. In the background, on the left side of the escalator is another escalator running downwards, with metal handrails on both sides of the escalator. 
On the right side are some shops, and the shop signs have Vietnamese logos.", "visual_elements": [ { "content": "The camera is focused on a young man.", "type": "camera", "weight": 3 }, { "content": "The man is facing the camera, standing on an escalator that is moving upwards.", "type": "attribute", "weight": 3 }, { "content": "The man has a black beard, wears a white T-shirt and light-colored pants, and carries a black backpack.", "type": "attribute", "weight": 2 }, { "content": "The man has a prominent tattoo on his right arm.", "type": "attribute", "weight": 1 }, { "content": "The man is holding the escalator handrail with his right hand.", "type": "action", "weight": 2 }, { "content": "In the background, on the left side of the escalator is another escalator that is moving downwards.", "type": "scene", "weight": 2 }, { "content": "There are metal handrails on both sides of the escalator.", "type": "scene", "weight": 1 }, { "content": "There are some shops on the right side of the screen.", "type": "scene", "weight": 2 }, { "content": "The shop signs have Vietnamese writing on them.", "type": "scene", "weight": 1 } ] }, { "event": "The camera follows the man on the escalator. He gestures with his left hand as he talks. People keep passing by in the background. At the end of the video, a short-haired woman in black walks onto the escalator next to him.", "visual_elements": [ { "content": "The camera follows the man on the escalator.", "type": "camera", "weight": 3 }, { "content": "The man gestures with his left hand while talking.", "type": "action", "weight": 3 }, { "content": "Pedestrians keep passing by in the background.", "type": "scene", "weight": 1 }, { "content": "At the end of the video, a short-haired woman in black walks onto the downward escalator next to him.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0834", "video_path": "VIDGEN-1M/i2bR5AESzyg-Scene-0012.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 3.84, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a green meadow with some clover and other weeds. The grass blades are of different shapes, and some of them have traces of dew. In the center of the picture, there is a hand with a lighter skin color and a partially exposed arm.\nThen the camera moves downward, and the object under the hand gradually becomes clear. It is a small frog. The frog is light green. The frog jumps forward and is then held down by the hand.\nFinally, the camera moves slightly to the right, following the frog's movements. The frog is pressed by the hand and jumps to the right of the screen. Then the hand pinches the frog and lifts it from the grass.", "events": [ { "event": "At the beginning of the video, the camera is focused on a green meadow with some clover and other weeds. The grass blades are of different shapes, and some of them have traces of dew. 
In the center of the picture, there is a hand with a lighter skin color and a partially exposed arm.", "visual_elements": [ { "content": "The camera is pointed at a green meadow.", "type": "camera", "weight": 3 }, { "content": "There are some clovers and other weeds on the meadow.", "type": "scene", "weight": 2 }, { "content": "The blades of grass are of different shapes, and some of them have traces of dew.", "type": "scene", "weight": 1 }, { "content": "There is a light-skinned hand in the center of the picture.", "type": "attribute", "weight": 3 }, { "content": "The arm is partially exposed.", "type": "attribute", "weight": 2 } ] }, { "event": "Then the camera moves downward, and the object under the hand gradually becomes clear. It is a small frog. The frog is light green. The frog jumps forward and is then held down by the hand.", "visual_elements": [ { "content": "The camera moves downward, and the object under the hand gradually becomes clear.", "type": "camera", "weight": 3 }, { "content": "It is a small frog.", "type": "attribute", "weight": 3 }, { "content": "The frog is light green.", "type": "attribute", "weight": 1 }, { "content": "The frog jumps forward and is then held down by the hand.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the camera moves slightly to the right, following the frog's movements. The frog is pressed by the hand and jumps to the right of the screen. Then the hand pinches the frog and lifts it from the grass.", "visual_elements": [ { "content": "Finally, the camera moves slightly to the right, shaking with the frog's movements.", "type": "camera", "weight": 3 }, { "content": "The frog is held down by the hand and jumps to the right of the picture.", "type": "action", "weight": 2 }, { "content": "The hand pinches the frog and lifts it from the grass.", "type": "action", "weight": 3 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0835", "video_path": "VIDGEN-1M/iWADvm6DLn0-Scene-0011.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 20.49, "resolution": { "width": 1280, "height": 720 }, "caption": "The video starts off with the camera focusing on a rocky path, surrounded by rocks of different sizes, the rocky path is covered with some yellow fallen leaves and the path is next to bare trees.\nThe camera moves in the direction of the path, there is a small bend in the path in the frame and the camera keeps shaking as the number of trees in the distance increases.", "events": [ { "event": "The video starts off with the camera focusing on a rocky path, surrounded by rocks of different sizes, the rocky path is covered with some yellow fallen leaves and the path is next to bare trees.", "visual_elements": [ { "content": "The camera focuses on a rocky path.", "type": "camera", "weight": 3 }, { "content": "It is surrounded by rocks of different sizes.", "type": "scene", "weight": 2 }, { "content": "The steps are covered with yellow leaves.", "type": "scene", "weight": 2 }, { "content": "Next to the path are bare trees.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves in the direction of the path, there is a small bend in the path in the frame and the camera keeps shaking as the number of trees in the distance increases.", "visual_elements": [ { "content": "The camera moves in the direction of the path.", "type": "camera", "weight": 3 }, { "content": "There's a small bend in the path.", "type": "scene", "weight": 3 }, { "content": "The camera keeps 
shaking as the number of trees in the distance increases.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 7 }, { "index": "TUNA_0836", "video_path": "VIDGEN-1M/ixJbnSoFtJk-Scene-0025.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 4.77, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, two middle-aged white men are fighting in a UFC match. Both of them are bald and stand barefoot on a blue mat. The man on the left is wearing a black T-shirt with a red pattern and camouflage shorts, and black and white boxing gloves. The man on the right is wearing a gray vest and black shorts with white stripes, holding black protective gear. The match takes place in a large indoor venue. A large UFC logo and advertising screen are hung on the left side of the screen. The venue lights are yellow. Most of the audience members are wearing casual clothes, some are taking pictures, and some are watching.\nThe man in the black T-shirt raises his right leg and quickly kicks the protective gear of the man on the right. Each kick of the man in black is very powerful, and the man on the right slightly retreats to buffer the force. Then, the man in black alternates between kicking and punching. The man on the right always maintains a stable posture and raises his protective gear to protect himself. The camera finally moves slightly to the right.", "events": [ { "event": "At the beginning of the video, two middle-aged white men are fighting in a UFC match. Both of them are bald and stand barefoot on a blue mat. The man on the left is wearing a black T-shirt with a red pattern and camouflage shorts, and black and white boxing gloves. The man on the right is wearing a gray vest and black shorts with white stripes, holding black protective gear. The match takes place in a large indoor venue. A large UFC logo and advertising screen are hung on the left side of the screen. The venue lights are yellow. Most of the audience members are wearing casual clothes, some are taking pictures, and some are watching.", "visual_elements": [ { "content": "In the picture, two middle-aged white men are fighting in a UFC match.", "type": "action", "weight": 3 }, { "content": "Both men are bald and stand barefoot on a blue mat.", "type": "attribute", "weight": 3 }, { "content": "The man on the left is wearing a black T-shirt with a red pattern and camouflage shorts, and wearing black and white boxing gloves.", "type": "attribute", "weight": 3 }, { "content": "The man on the right is wearing a gray vest and black shorts with white stripes, holding black protective gear.", "type": "attribute", "weight": 3 }, { "content": "The match takes place in a large indoor venue.", "type": "scene", "weight": 3 }, { "content": "A large UFC logo and advertising screen are hung on the left side of the picture.", "type": "scene", "weight": 2 }, { "content": "The venue lights are yellow.", "type": "scene", "weight": 1 }, { "content": "Most of the audience members are wearing casual clothes, some are taking pictures, and some are watching.", "type": "scene", "weight": 1 } ] }, { "event": "The man in the black T-shirt raises his right leg and quickly kicks the protective gear of the man on the right. Each kick of the man in black is very powerful, and the man on the right slightly retreats to buffer the force. Then, the man in black alternates between kicking and punching. 
The man on the right always maintains a stable posture and raises his protective gear to protect himself. The camera finally moves slightly to the right.", "visual_elements": [ { "content": "The man in the black T-shirt raises his right leg and quickly kicks the protective gear of the man on the right.", "type": "action", "weight": 3 }, { "content": "Every kick of the man in black is very powerful, and the man on the right slightly retreats to buffer the force.", "type": "action", "weight": 2 }, { "content": "Then, the man in black alternates between kicking and punching.", "type": "action", "weight": 2 }, { "content": "The man on the right always maintains a stable posture and raises his protective gear to protect himself.", "type": "action", "weight": 1 }, { "content": "The camera finally moves slightly to the right.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0837", "video_path": "VIDGEN-1M/kXVmVGx1aM8-Scene-0039.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 7.12, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with two middle-aged male fighters competing in a mixed martial arts match. The man on the left side of the screen is wearing blue shorts and a red wristband. The man on the right side of the screen is wearing a blue wristband, a thick beard, and black shorts. Both are shirtless and wearing black boxing gloves with white letters. Both men stand barefoot in the octagonal cage with their hands raised. There are advertising signs of different colors on the white ground. There are spectators watching the match outside the fence.\nThe camera moves to the left as the two men move. The man on the right begins to attack the man on the left. The man on the right quickly moves forward and swings his left fist at the man's head. The man on the left blocks with his hand, but the fist of the man on the right still hits the man on the left in the face.\nNext, the camera moves slightly to the left and back. The man on the right throws a left fist followed by a right fist, hitting the man on the left in the head again. The man on the left is forced to retreat, close to the edge of the octagon.", "events": [ { "event": "The video begins with two middle-aged male fighters competing in a mixed martial arts match. The man on the left side of the screen is wearing blue shorts and a red wristband. The man on the right side of the screen is wearing a blue wristband, a thick beard, and black shorts. Both are shirtless and wearing black boxing gloves with white letters. Both men stand barefoot in the octagonal cage with their hands raised. There are advertising signs of different colors on the white ground. 
There are spectators watching the match outside the fence.", "visual_elements": [ { "content": "Two middle-aged male fighters are fighting in a mixed martial arts match.", "type": "action", "weight": 3 }, { "content": "The man on the left side of the picture is wearing blue shorts and a red bracelet.", "type": "attribute", "weight": 3 }, { "content": "The man on the right side of the picture is wearing a blue bracelet, a thick beard, and black shorts.", "type": "attribute", "weight": 3 }, { "content": "Both men are shirtless and wearing black boxing gloves with white letters.", "type": "attribute", "weight": 1 }, { "content": "Both men stand barefoot in the octagon with their hands raised.", "type": "action", "weight": 3 }, { "content": "There are advertising signs of different colors on the white ground.", "type": "scene", "weight": 2 }, { "content": "There are spectators watching the match outside the fence.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves to the left as the two men move. The man on the right begins to attack the man on the left. The man on the right quickly moves forward and swings his left fist at the man's head. The man on the left blocks with his hand, but the fist of the man on the right still hits the man on the left in the face.", "visual_elements": [ { "content": "The camera moves to the left as the two men move.", "type": "camera", "weight": 3 }, { "content": "The man on the right begins to attack the man on the left.", "type": "action", "weight": 3 }, { "content": "The man on the right quickly moves forward and swings his left fist at the man's head.", "type": "action", "weight": 3 }, { "content": "The man on the left blocks with his hand, but the man on the right's fist still hits the man on the left in the face.", "type": "action", "weight": 2 } ] }, { "event": "Next, the camera moves slightly to the left and back. The man on the right throws a left fist followed by a right fist, hitting the man on the left in the head again. The man on the left is forced to retreat, close to the edge of the octagon.", "visual_elements": [ { "content": "Next, the camera moves slightly to the left and back.", "type": "camera", "weight": 3 }, { "content": "The man on the right swings his left fist and then his right fist, hitting the man on the left in the head again.", "type": "action", "weight": 3 }, { "content": "The man on the left is forced to retreat and approach the edge of the octagonal cage on the left side of the screen.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0838", "video_path": "VIDGEN-1M/kYRCi8Ob56U-Scene-0005.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic,Multi-Subject", "domain": "Sports Activity", "duration": 12.15, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is looking down at a boxing match. Two middle-aged male boxers are fighting fiercely in the boxing ring. The black boxer on the left is wearing red shorts with black patterns. The white boxer on the right is wearing blue shorts with white letters and has a prominent tattoo on his back. Both boxers are wearing black boxing gloves. A middle-aged male referee in a blue shirt stands in the lower left corner of the screen, watching the match closely. The floor of the boxing ring is blue with yellow, white and red words printed on it. The boxing ring is surrounded by black ropes. A circle of staff in formal clothes sits under the boxing ring. 
There are papers and pens in front of the staff. There are spectators behind the staff.\nThe camera moves to the right front. The red boxer leans against the corner of the ring, while the boxer in blue shorts keeps attacking, punching his opponent's head and body. The boxer in red shorts tries to defend and counterattack, but is on the defensive most of the time. Finally, the red boxer hugs the blue boxer's waist, and the referee approaches the two entangled boxers and stands on the right side of the screen. The staff and spectators sit around the ring and watch the game carefully.", "events": [ { "event": "At the beginning of the video, the camera is looking down at a boxing match. Two middle-aged male boxers are fighting fiercely in the boxing ring. The black boxer on the left is wearing red shorts with black patterns. The white boxer on the right is wearing blue shorts with white letters and has a prominent tattoo on his back. Both boxers are wearing black boxing gloves. A middle-aged male referee in a blue shirt stands in the lower left corner of the screen, watching the match closely. The floor of the boxing ring is blue with yellow, white and red words printed on it. The boxing ring is surrounded by black ropes. A circle of staff in formal clothes sits under the boxing ring. There are papers and pens in front of the staff. There are spectators behind the staff.", "visual_elements": [ { "content": "The camera is looking down at an indoor boxing match.", "type": "camera", "weight": 3 }, { "content": "Two middle-aged male boxers are fighting fiercely in the boxing ring.", "type": "action", "weight": 3 }, { "content": "The black boxer on the left is wearing red shorts with black patterns.", "type": "attribute", "weight": 3 }, { "content": "The white boxer on the right is wearing blue shorts with white letters and a prominent tattoo on his back.", "type": "attribute", "weight": 3 }, { "content": "Both boxers are wearing black boxing gloves.", "type": "attribute", "weight": 1 }, { "content": "A middle-aged male referee in a blue shirt stands in the lower left corner of the picture, watching the match closely.", "type": "attribute", "weight": 2 }, { "content": "The floor of the boxing ring is blue with yellow, white and red words printed on it.", "type": "scene", "weight": 3 }, { "content": "The boxing ring is surrounded by black ropes.", "type": "scene", "weight": 2 }, { "content": "A circle of staff in formal clothes sits around the boxing ring.", "type": "scene", "weight": 2 }, { "content": "There are papers and pens in front of the staff.", "type": "scene", "weight": 1 }, { "content": "There are spectators behind the staff.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves to the right front. The red boxer leans against the corner of the ring, while the boxer in blue shorts keeps attacking, punching his opponent's head and body. The boxer in red shorts tries to defend and counterattack, but is on the defensive most of the time. Finally, the red boxer hugs the blue boxer's waist, and the referee approaches the two entangled boxers and stands on the right side of the screen. 
The staff and spectators sit around the ring and watch the game carefully.", "visual_elements": [ { "content": "The camera moves to the right front.", "type": "camera", "weight": 3 }, { "content": "The red boxer leans against the corner of the boxing ring.", "type": "action", "weight": 3 }, { "content": "The boxer in blue shorts is constantly attacking, punching his opponent's head and body.", "type": "action", "weight": 3 }, { "content": "The boxer in red shorts tries to defend and counterattack, but is on the defensive most of the time.", "type": "action", "weight": 2 }, { "content": "Finally, the red boxer hugs the blue boxer's waist.", "type": "action", "weight": 3 }, { "content": "The referee approaches the two entangled boxers and stands on the right side of the picture.", "type": "action", "weight": 2 }, { "content": "The staff and spectators sit around the boxing ring and watch the match carefully.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 18 }, { "index": "TUNA_0839", "video_path": "VIDGEN-1M/laNKo2-D7eU-Scene-0061.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 6.21, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on the back seat of a car with a black leather seat with gray stitching. There is no clutter on the seat, and the seatbelt buckle on the seatback and the seatbelt socket on the bottom of the seat can be seen.\nNext, the camera backs up and pans to the door side of the car, showing part of the front seat of the car, with a black circle next to the seat backrest and a gray seatbelt running through the circle. There is also a person's left hand resting next to the seat.", "events": [ { "event": "The video begins with the camera focusing on the back seat of a car with a black leather seat with gray stitching. There is no clutter on the seat, and the seatbelt buckle on the seatback and the seatbelt socket on the bottom of the seat can be seen.", "visual_elements": [ { "content": "The camera focuses on the back seat of a car.", "type": "camera", "weight": 3 }, { "content": "The seats are black leather.", "type": "attribute", "weight": 3 }, { "content": "The seat has gray stitching.", "type": "attribute", "weight": 2 }, { "content": "There's no clutter on the seat.", "type": "attribute", "weight": 2 }, { "content": "The seatbelt buckle on the seatback and the seatbelt socket on the bottom of the seat can be seen.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera backs up and pans to the door side of the car, showing part of the front seat of the car, with a black circle next to the seat backrest and a gray seatbelt running through the circle. 
There is also a person's left hand resting next to the seat.", "visual_elements": [ { "content": "The camera moves back.", "type": "camera", "weight": 3 }, { "content": "The camera pans to the door side of the car.", "type": "camera", "weight": 3 }, { "content": "The camera shows part of the front seat of the car.", "type": "attribute", "weight": 1 }, { "content": "There's a black ring next to the seat backrest.", "type": "attribute", "weight": 2 }, { "content": "A gray seat belt runs through the ring.", "type": "attribute", "weight": 2 }, { "content": "A person's left hand rests on the side of the seat.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0840", "video_path": "VIDGEN-1M/mEZbljxnr84-Scene-0048.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 14.68, "resolution": { "width": 1280, "height": 720 }, "caption": "The video shows a show jumping competition at an indoor equestrian arena. In the middle of the arena is a young white rider wearing a blue top and white riding pants. The rider wears a black helmet with a white pattern and rides a brown horse with a black mane. The rider is riding to the left of the frame, preparing to jump. There are multiple obstacles on the sandy ground of the arena, mainly hurdles. There are white fences and blue billboards with white writing around the arena. Several people sit on blue and yellow seats behind the fence to watch the competition.\nThe camera follows the movements of the rider and horse, moving slightly to the left. The rider and horse approach the first white jump hurdle with red, yellow and blue stripes. The horse jumps over the obstacle easily. The rider keeps his balance and leans forward. Then, the rider and horse move quickly in the middle of the field facing the camera, preparing to jump the next obstacle.\nThen, the camera moves to the right, following the movements of the rider and the horse. Guided by the rider, the horse turns to the right of the frame, then approaches the blue barrier with white stripes with its back to the camera. Green plants are placed on both sides of the blue barrier. The barriers in the venue are of various designs, some are decorated with flowers, and some have horse head patterns on them.\nThen, the camera shoots from behind, moving forward with the movements of the rider and horse. The rider and horse jump over three blue obstacles with white stripes and land steadily.\nFinally, the rider rides to the right side of the screen. The camera also moves slightly to the left. On the left side of the screen, some spectators sit on the white railings of the auditorium, and some stand behind the railings to watch the competition.", "events": [ { "event": "The video shows a show jumping competition at an indoor equestrian arena. In the middle of the arena is a young white rider wearing a blue top and white riding pants. The rider wears a black helmet with a white pattern and rides a brown horse with a black mane. The rider is riding to the left of the frame, preparing to jump. There are multiple obstacles on the sandy ground of the arena, mainly hurdles. There are white fences and blue billboards with white writing around the arena. 
Several people sit on blue and yellow seats behind the fence to watch the competition.", "visual_elements": [ { "content": "The video shows a jumping competition in an indoor equestrian arena.", "type": "camera", "weight": 3 }, { "content": "A young white rider wearing a blue top and white riding pants is in the middle of the arena.", "type": "attribute", "weight": 3 }, { "content": "The rider wears a black helmet with a white pattern and rides a brown horse with a black mane.", "type": "attribute", "weight": 2 }, { "content": "The rider rides to the left of the frame, preparing to jump.", "type": "action", "weight": 3 }, { "content": "There are multiple obstacles on the sandy ground in the arena, mainly jumping hurdles.", "type": "scene", "weight": 3 }, { "content": "There are white fences and blue billboards with white writing around the arena.", "type": "scene", "weight": 2 }, { "content": "Several people sit on blue and yellow seats behind the fence to watch the competition.", "type": "scene", "weight": 1 } ] }, { "event": "The camera follows the movements of the rider and horse, moving slightly to the left. The rider and horse approach the first white jump hurdle with red, yellow and blue stripes. The horse jumps over the obstacle easily. The rider keeps his balance and leans forward. Then, the rider and horse move quickly in the middle of the field facing the camera, preparing to jump the next obstacle.", "visual_elements": [ { "content": "The camera follows the movements of the rider and horse, moving slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The rider and horse approach the first white jumping hurdle with red, yellow and blue stripes.", "type": "action", "weight": 2 }, { "content": "The horse jumps over the obstacle easily.", "type": "action", "weight": 3 }, { "content": "The rider keeps his balance and leans forward.", "type": "action", "weight": 2 }, { "content": "The rider and horse then move quickly in the middle of the arena facing the camera, preparing to jump the next obstacle.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera moves to the right, following the movements of the rider and the horse. Guided by the rider, the horse turns to the right of the frame, then approaches the blue barrier with white stripes with its back to the camera. Green plants are placed on both sides of the blue barrier. The barriers in the venue are of various designs, some are decorated with flowers, and some have horse head patterns on them.", "visual_elements": [ { "content": "Then, the camera moves to the right following the movements of the rider and the horse.", "type": "action", "weight": 3 }, { "content": "The horse turns to the right side of the screen under the guidance of the rider.", "type": "action", "weight": 3 }, { "content": "The horse and the rider approach the blue obstacle with white stripes with their backs to the camera.", "type": "action", "weight": 2 }, { "content": "Green plants are placed on both sides of the blue obstacle.", "type": "scene", "weight": 1 }, { "content": "The obstacles in the venue are of various designs, some are decorated with flowers, and some have horse head patterns.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera shoots from behind, moving forward with the movements of the rider and horse. 
The rider and horse jump over three blue obstacles with white stripes and land steadily.", "visual_elements": [ { "content": "Then, the camera shoots from the back, moving forward with the movements of the rider and the horse.", "type": "camera", "weight": 3 }, { "content": "The rider and the horse jump over three blue obstacles with white stripes in a row and land steadily.", "type": "action", "weight": 3 } ] }, { "event": "Finally, the rider rides to the right side of the screen. The camera also moves slightly to the left. On the left side of the screen, some spectators sit on the white railings of the auditorium, and some stand behind the railings to watch the competition.", "visual_elements": [ { "content": "Finally, the rider rides to the right side of the screen.", "type": "action", "weight": 1 }, { "content": "The camera also moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "On the left side of the screen, some spectators sit on the white railings of the audience seats, and some stand behind the railings to watch the game.", "type": "scene", "weight": 1 } ] } ], "n_events": 5, "n_elements": 22 }, { "index": "TUNA_0841", "video_path": "VIDGEN-1M/mMlNptqerDU-Scene-0024.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 11.34, "resolution": { "width": 1280, "height": 720 }, "caption": "The footage is captured at night. At the beginning of the video, the camera focuses on the front of a house. The main door of the house is dark-colored with two large white labels hanging on it. To the left of the door, there is a rectangular glass window, and a lit lamp hangs from the ceiling in front of the door. On the right side of the house, there are two adjacent large windows, with a row of buried lights emitting green light beneath them.\nAs the camera pans to the left, the wall on the left side of the house gradually enters the scene. This part of the wall is illuminated by red underground lights. There is a large glass window on the left, with a black grid structure behind it. In the foreground, some bushes and branches of trees in front of the house can be seen.", "events": [ { "event": "The footage is captured at night. At the beginning of the video, the camera focuses on the front of a house. The main door of the house is dark-colored with two large white labels hanging on it. To the left of the door, there is a rectangular glass window, and a lit lamp hangs from the ceiling in front of the door. 
On the right side of the house, there are two adjacent large windows, with a row of buried lights emitting green light beneath them.", "visual_elements": [ { "content": "The footage is captured at night.", "type": "scene", "weight": 2 }, { "content": "The camera focuses on the front of a house.", "type": "camera", "weight": 3 }, { "content": "The front door of the house is dark-colored.", "type": "attribute", "weight": 2 }, { "content": "Two large white tags hang on the door.", "type": "attribute", "weight": 2 }, { "content": "To the left of the door, there is a rectangular glass window.", "type": "attribute", "weight": 2 }, { "content": "A lit lamp hangs from the ceiling in front of the door.", "type": "attribute", "weight": 2 }, { "content": "There are two adjacent large windows on the right side of the house.", "type": "attribute", "weight": 2 }, { "content": "A row of green underground lights shine beneath the windows.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera pans to the left, the wall on the left side of the house gradually enters the scene. This part of the wall is illuminated by red underground lights. There is a large glass window on the left, with a black grid structure behind it. In the foreground, some bushes and branches of trees in front of the house can be seen.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The wall on the left side of the house gradually enters the scene.", "type": "attribute", "weight": 2 }, { "content": "This part of the wall is illuminated by red underground lights.", "type": "attribute", "weight": 2 }, { "content": "There is a large glass window on the left.", "type": "attribute", "weight": 2 }, { "content": "Behind the window is a black grid structure.", "type": "attribute", "weight": 1 }, { "content": "In the foreground, some bushes and branches of trees in front of the house can be seen.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0842", "video_path": "VIDGEN-1M/mzToDzOZz_M-Scene-0006.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 13.65, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a young white woman wearing a black sleeveless top, holding a black microphone with the words \"POWER AUTOMEDIA\" in white. Her hair is blond and falls on her shoulders. The woman passes the microphone to a middle-aged bald white man opposite. The man is wearing a white shirt with a name tag on it. In the background is a blue display board with \"BILSTEIN SUSPENSION\" written on it, and some suspension system parts are displayed next to it. In the background is a man wearing a red short-sleeved shirt with his back to the camera.\nThe camera pans left. The camera focuses on the man on the right side of the frame. The man is talking and the woman nods occasionally. In the background, there are other people wearing red and white clothes walking around the exhibition area. There are more display panels and suspension system parts in the frame.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young white woman wearing a black sleeveless top, holding a black microphone with the words \"POWER AUTOMEDIA\" in white. Her hair is blond and falls on her shoulders. The woman passes the microphone to a middle-aged bald white man opposite. 
The man is wearing a white shirt with a name tag on it. In the background is a blue display board with \"BILSTEIN SUSPENSION\" written on it, and some suspension system parts are displayed next to it. In the background is a man wearing a red short-sleeved shirt with his back to the camera.", "visual_elements": [ { "content": "The camera is focused on a young white woman wearing a black sleeveless top.", "type": "camera", "weight": 3 }, { "content": "The woman has blonde hair that falls on her shoulders.", "type": "attribute", "weight": 2 }, { "content": "The woman is holding a black microphone with the words \"POWER AUTOMEDIA\" written on it in white.", "type": "attribute", "weight": 3 }, { "content": "The woman passes the microphone to a middle-aged white man across from her.", "type": "action", "weight": 3 }, { "content": "The white man has a shaved head and wears a white shirt with a name tag on it.", "type": "attribute", "weight": 3 }, { "content": "In the background there is a blue display board with \"BILSTEIN SUSPENSION\" written on it.", "type": "scene", "weight": 3 }, { "content": "Some suspension system parts are displayed next to the display board.", "type": "scene", "weight": 2 }, { "content": "In the background there is a man wearing a red short-sleeved shirt with his back to the camera.", "type": "scene", "weight": 1 } ] }, { "event": "The camera pans left. The camera focuses on the man on the right side of the frame. The man is talking and the woman nods occasionally. In the background, there are other people wearing red and white clothes walking around the exhibition area. There are more display panels and suspension system parts in the frame.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the man on the right side of the screen.", "type": "camera", "weight": 3 }, { "content": "The man is talking and the woman nods occasionally.", "type": "action", "weight": 3 }, { "content": "In the background there are other people wearing red and white clothes walking around the exhibition area.", "type": "scene", "weight": 2 }, { "content": "There are more display boards and suspension system parts in the picture.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0843", "video_path": "VIDGEN-1M/nGmPD9xVdbM-Scene-0186.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 4.14, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is pointed at a sandy area. On the left side of the screen is a man wearing a khaki short-sleeved shirt and shorts, with a black wireless microphone receiver on his waist. The camera is shot from behind the man. The man is bent over, holding a branch in his right hand, with the end of the branch close to the ground. The background is dry sand, with some small stones and dry plants scattered on the ground. The sun shines on the sand, forming obvious shadows.\nThe camera moves to the right side of the screen, and a snake gradually becomes clear in the picture. The sand-colored snake presents a natural pattern, blending in with the surrounding environment. 
The man gently pokes the snake's body with a branch, and the snake begins to move slowly.\nFinally, the camera zooms out slightly, and the snake crawls to the right side of the frame, leaving a clear trail.", "events": [ { "event": "At the beginning of the video, the camera is pointed at a sandy area. On the left side of the screen is a man wearing a khaki short-sleeved shirt and shorts, with a black wireless microphone receiver on his waist. The camera is shot from behind the man. The man is bent over, holding a branch in his right hand, with the end of the branch close to the ground. The background is dry sand, with some small stones and dry plants scattered on the ground. The sun shines on the sand, forming obvious shadows.", "visual_elements": [ { "content": "The camera is pointed at a sandy area.", "type": "camera", "weight": 3 }, { "content": "There is a man wearing a khaki short-sleeved shirt and shorts on the left side of the screen.", "type": "attribute", "weight": 3 }, { "content": "A black wireless microphone receiver is attached to the man's waist.", "type": "attribute", "weight": 2 }, { "content": "The camera is shot from behind the man.", "type": "camera", "weight": 2 }, { "content": "The man is bent over, holding a branch in his right hand.", "type": "action", "weight": 2 }, { "content": "The background is dry sand.", "type": "scene", "weight": 3 }, { "content": "Some small stones and dry plants are scattered on the ground.", "type": "scene", "weight": 2 }, { "content": "The sun shines on the sand, forming a clear shadow.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves to the right side of the screen, and a snake gradually becomes clear in the picture. The sand-colored snake presents a natural pattern, blending in with the surrounding environment. The man gently pokes the snake's body with a branch, and the snake begins to move slowly.", "visual_elements": [ { "content": "The camera moves to the right side of the screen.", "type": "camera", "weight": 3 }, { "content": "A snake gradually becomes clear in the picture.", "type": "attribute", "weight": 3 }, { "content": "The snake is similar in color to the ground and blends in with the surrounding environment.", "type": "attribute", "weight": 2 }, { "content": "The snake's body shows obvious patterns.", "type": "attribute", "weight": 1 }, { "content": "The man gently moves the snake's body with a branch.", "type": "action", "weight": 2 }, { "content": "The snake begins to move slowly.", "type": "action", "weight": 2 } ] }, { "event": "Finally, the camera zooms out slightly, and the snake crawls to the right side of the frame, leaving a clear trail.", "visual_elements": [ { "content": "Finally, the camera is slightly farther away.", "type": "camera", "weight": 3 }, { "content": "The snake crawls to the right side of the screen.", "type": "action", "weight": 2 }, { "content": "The snake leaves obvious traces.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0844", "video_path": "VIDGEN-1M/nJi2fnuNEcQ-Scene-0190.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "High-Dynamic", "domain": "Sports Activity", "duration": 6.77, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a young black man on a skateboard. The man is wearing a yellow inner layer with black letters and a white transparent jacket with a blue zipper, black trousers and black shoes. 
The man has a beard and wears white-framed glasses. The man is skating on a smooth cement floor indoors. There are some yellow crash barriers and gray walls indoors. There are two gray rolling doors on the right side of the picture.\nThe camera moves to the left. The man turns his back to the camera at first, then makes a jump on his skateboard. The skateboard flips over on its side, and the man jumps to the ground, bends over and faces the camera. The camera pans left and right, focusing on the man's movements.\nAt the end of the video, the camera moves in on the man. The man bends down, lifts his left foot, and puts on his shoe with his right hand. The man smiles awkwardly at the camera.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young black man on a skateboard. The man is wearing a yellow inner layer with black letters and a white transparent jacket with a blue zipper, black trousers and black shoes. The man has a beard and wears white-framed glasses. The man is skating on a smooth cement floor indoors. There are some yellow crash barriers and gray walls indoors. There are two gray rolling doors on the right side of the picture.", "visual_elements": [ { "content": "The camera is focused on a young black man on a skateboard.", "type": "camera", "weight": 3 }, { "content": "The man is wearing a yellow top with black letters and a white transparent jacket with a blue zipper, black trousers and black shoes.", "type": "attribute", "weight": 3 }, { "content": "The man has a beard and wears white-framed glasses.", "type": "attribute", "weight": 2 }, { "content": "The man is skating on a smooth cement floor indoors.", "type": "action", "weight": 3 }, { "content": "There are some yellow crash barriers and gray walls indoors.", "type": "scene", "weight": 2 }, { "content": "There are two gray rolling doors on the right side of the screen.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves to the left. The man turns his back to the camera at first, then makes a jump on his skateboard. The skateboard flips over on its side, and the man jumps to the ground, bends over and faces the camera. The camera pans left and right, focusing on the man's movements.", "visual_elements": [ { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The man turns his back to the camera at first, then makes a jump on the skateboard.", "type": "action", "weight": 3 }, { "content": "The skateboard flips sideways on the ground, and the man jumps to the ground and bends down to face the camera.", "type": "action", "weight": 2 }, { "content": "The camera shakes left and right, focusing on the man's movements.", "type": "camera", "weight": 3 } ] }, { "event": "At the end of the video, the camera moves in on the man. The man bends down, lifts his left foot, and puts on his shoe with his right hand. 
The man smiles awkwardly at the camera.", "visual_elements": [ { "content": "At the end of the video, the camera moves toward the man.", "type": "camera", "weight": 3 }, { "content": "The man bends down, lifts his left foot, and puts on his shoes with his right hand.", "type": "action", "weight": 3 }, { "content": "The man smiles awkwardly at the camera.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0845", "video_path": "VIDGEN-1M/uDr184fafuo-Scene-0002.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 6.64, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a young white woman and a golden retriever puppy. The woman has golden hair that falls on her shoulders. The woman is wearing a red hoodie with a white and gray snowflake pattern. The woman is holding a golden retriever puppy. The puppy's hair is soft and fluffy, its ears are drooping, and its eyes are half closed. The background is a corner of a white wall.\nThe camera moves slightly upwards, keeping a close-up shot. The woman holds the puppy in her right hand and pokes the puppy's face with her left hand while talking. The woman wears a silver ring on her left ring finger. The camera shakes throughout the process.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young white woman and a golden retriever puppy. The woman has golden hair that falls on her shoulders. The woman is wearing a red hoodie with a white and gray snowflake pattern. The woman is holding a golden retriever puppy. The puppy's hair is soft and fluffy, its ears are drooping, and its eyes are half closed. The background is a corner of a white wall.", "visual_elements": [ { "content": "The camera is focused on a young white woman and a golden retriever puppy.", "type": "camera", "weight": 3 }, { "content": "The woman has blonde hair that falls over her shoulders.", "type": "attribute", "weight": 2 }, { "content": "The woman is wearing a red hoodie with a white and grey snowflake pattern.", "type": "attribute", "weight": 2 }, { "content": "The woman is holding a golden retriever puppy.", "type": "action", "weight": 3 }, { "content": "The puppy has soft and fluffy hair, drooping ears, and half-closed eyes.", "type": "attribute", "weight": 2 }, { "content": "The background is the corner of a white wall.", "type": "scene", "weight": 3 } ] }, { "event": "The camera moves slightly upwards, keeping a close-up shot. The woman holds the puppy in her right hand and pokes the puppy's face with her left hand while talking. The woman wears a silver ring on her left ring finger. 
The camera shakes throughout the process.", "visual_elements": [ { "content": "The camera moves slightly upward.", "type": "camera", "weight": 3 }, { "content": "The camera remains in close-up.", "type": "camera", "weight": 3 }, { "content": "The woman holds the puppy in her right hand and pokes the puppy's face with her left hand while talking.", "type": "action", "weight": 3 }, { "content": "The woman wears a silver ring on her left ring finger.", "type": "attribute", "weight": 1 }, { "content": "The camera shakes throughout the process.", "type": "camera", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0846", "video_path": "VIDGEN-1M/vBMoxQaS4Xc-Scene-0169.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 13.51, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a camera shot from above looking down on a wooden platform with some tools and parts on top of it. A green front of the vehicle can be seen in the frame. The front wheels of the vehicle have been removed and the suspension and brake components are clearly visible.\nThe camera pans right while remaining shaking as a person in a black long-sleeved shirt picks up a wrench from the wooden platform and mounts the tool in both hands to begin the repair.\nThe camera moves slightly left to reveal more tools, including a metal disk containing various nuts and bolts, and a small camera unit.", "events": [ { "event": "The video begins with a camera shot from above looking down on a wooden platform with some tools and parts on top of it. A green front of the vehicle can be seen in the frame. The front wheels of the vehicle have been removed and the suspension and brake components are clearly visible.", "visual_elements": [ { "content": "The camera is looking down from above.", "type": "camera", "weight": 3 }, { "content": "There are some tools and parts on a wooden platform.", "type": "attribute", "weight": 2 }, { "content": "The front of the vehicle can be seen in green.", "type": "attribute", "weight": 3 }, { "content": "The front wheels of the vehicle have been removed.", "type": "attribute", "weight": 2 }, { "content": "The suspension and brake components are clearly visible.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans right while remaining shaking as a person in a black long-sleeved shirt picks up a wrench from the wooden platform and mounts the tool in both hands to begin the repair.", "visual_elements": [ { "content": "The camera pans to the right while remaining shaking.", "type": "camera", "weight": 3 }, { "content": "A person in a black long-sleeved shirt picks up a wrench from the wooden platform.", "type": "action", "weight": 3 }, { "content": "Both hands are fitted with tools to begin the repair.", "type": "action", "weight": 2 } ] }, { "event": "The camera moves slightly left to reveal more tools, including a metal disk containing various nuts and bolts, and a small camera unit.", "visual_elements": [ { "content": "The camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "More tools can be seen.", "type": "attribute", "weight": 2 }, { "content": "The tools include a metal disk containing various nuts and bolts, and a small camera unit.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0847", "video_path": "VIDGEN-1M/wXc1GK6FTkM-Scene-0021.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": 
"Low-Dynamic", "domain": "Animals & Pets", "duration": 15.72, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a white bucket filled with many small crustaceans. These crustaceans are oval in shape, mainly light brown and grayish white in color, with hard shells and obvious textures and arthroplasts on the surface. The background is a field full of weeds, with some sand and stones on the ground.\nThen, the camera moves down and focuses on the crustaceans in the bucket. A hand shakes the white bucket. Some crustaceans are upside down and swaying actively in the bucket.\nThen, the camera zooms up and a hand appears on the left side of the screen. A hand in a white sweater reaches into the bucket and picks up one of the crustaceans and puts it in the hand to show it. The camera focuses on the crustacean. The crustacean is held in the hand with its limbs facing up.\nFinally, the camera zooms in and out, and shakes left and right. The hand puts the crustacean into the bucket and then leaves the screen.", "events": [ { "event": "At the beginning of the video, the camera is focused on a white bucket filled with many small crustaceans. These crustaceans are oval in shape, mainly light brown and grayish white in color, with hard shells and obvious textures and arthroplasts on the surface. The background is a field full of weeds, with some sand and stones on the ground.", "visual_elements": [ { "content": "The camera is pointed at a white bucket.", "type": "camera", "weight": 3 }, { "content": "The bucket is filled with many small crustaceans.", "type": "attribute", "weight": 3 }, { "content": "The animals are oval in shape, and their colors are mainly light brown and grayish white.", "type": "attribute", "weight": 2 }, { "content": "The shell of the animal is hard, with obvious textures and arthropods on the surface.", "type": "attribute", "weight": 2 }, { "content": "The background is a piece of land covered with weeds.", "type": "scene", "weight": 3 }, { "content": "There are some sand and stones on the ground.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera moves down and focuses on the crustaceans in the bucket. A hand shakes the white bucket. Some crustaceans are upside down and swaying actively in the bucket.", "visual_elements": [ { "content": "Then, the camera moves downward.", "type": "camera", "weight": 3 }, { "content": "The camera is pointed at the animals in the bucket.", "type": "camera", "weight": 3 }, { "content": "A hand shakes the white bucket.", "type": "action", "weight": 2 }, { "content": "Some animals are upside down, shaking actively in the bucket.", "type": "action", "weight": 1 } ] }, { "event": "Then, the camera zooms up and a hand appears on the left side of the screen. A hand in a white sweater reaches into the bucket and picks up one of the crustaceans and puts it in the hand to show it. The camera focuses on the crustacean. 
The crustacean is held in the hand with its limbs facing up.", "visual_elements": [ { "content": "Then, the camera pulls up.", "type": "camera", "weight": 3 }, { "content": "A hand in a white sweater reaches into the bucket.", "type": "attribute", "weight": 3 }, { "content": "The hand takes one of the small animals out of the bucket.", "type": "action", "weight": 3 }, { "content": "The camera is pointed at the small animal taken out of the bucket.", "type": "camera", "weight": 2 }, { "content": "The small animal is held in the hand with its limbs facing up.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the camera zooms in and out, and shakes left and right. The hand puts the crustacean into the bucket and then leaves the screen.", "visual_elements": [ { "content": "Finally, the camera zooms in and out.", "type": "camera", "weight": 3 }, { "content": "The camera shakes left and right.", "type": "camera", "weight": 2 }, { "content": "The hand puts the animal into the bucket and leaves the screen.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0848", "video_path": "VIDGEN-1M/zv3RPw63bEY-Scene-0023.mp4", "video_source": "VIDGEN-1M", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 8.48, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a young white man. He is standing in the kitchen, wearing a green long-sleeved shirt. In the background, white cabinets and several small paintings on the wall can be seen. There is also a white electric kettle and some kitchen utensils on the kitchen countertop.\nThen, the camera zooms in and moves slightly to the right. In the frame, the man holds the pizza box up to his chest. His left hand grasps the bottom of the box, and his right hand is placed on top of the box, showing the pictures and text descriptions on the pizza packaging to the camera.\nThe camera shakes slightly. The man steps back a little and is about to place the pizza box on the kitchen countertop with both hands.", "events": [ { "event": "At the beginning of the video, the camera is focused on a young white man. He is standing in the kitchen, wearing a green long-sleeved shirt. In the background, white cabinets and several small paintings on the wall can be seen. There is also a white electric kettle and some kitchen utensils on the kitchen countertop.", "visual_elements": [ { "content": "The camera is focused on a young white man.", "type": "camera", "weight": 3 }, { "content": "The man is standing in the kitchen, wearing a green long-sleeved shirt.", "type": "scene", "weight": 3 }, { "content": "In the background, white cabinets and several small paintings on the wall can be seen.", "type": "scene", "weight": 2 }, { "content": "There is also a white electric kettle and some kitchen utensils on the kitchen countertop.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera zooms in and moves slightly to the right. In the frame, the man holds the pizza box up to his chest. 
His left hand grasps the bottom of the box, and his right hand is placed on top of the box, showing the pictures and text descriptions on the pizza packaging to the camera.", "visual_elements": [ { "content": "The camera zooms in and moves slightly to the right.", "type": "camera", "weight": 3 }, { "content": "In the frame, the man holds the pizza box up to his chest.", "type": "action", "weight": 3 }, { "content": "The man's left hand grasps the bottom of the box.", "type": "action", "weight": 3 }, { "content": "The man's right hand is placed on top of the box.", "type": "action", "weight": 3 }, { "content": "The man is showing the pictures and text descriptions on the pizza packaging to the camera.", "type": "action", "weight": 3 } ] }, { "event": "The camera shakes slightly. The man steps back a little and is about to place the pizza box on the kitchen countertop with both hands.", "visual_elements": [ { "content": "The camera shakes slightly.", "type": "camera", "weight": 3 }, { "content": "The man steps back a little.", "type": "action", "weight": 1 }, { "content": "The man is about to place the pizza box on the kitchen countertop with both hands.", "type": "action", "weight": 1 } ] } ], "n_events": 3, "n_elements": 12 }, { "index": "TUNA_0849", "video_path": "MiraData/video_clips/000005028/000005028787.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 20.56, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a wet street. In the background, several people are walking, wearing a variety of clothing, carrying shopping bags and pushing baby strollers. The camera remains static, with the perspective low and close to the ground.\nNext, a duck enters the frame from the right, slowly walking on the wet ground. The duck has brightly colored feathers, with a dark green head, brown breast, white belly, and orange paws. It walks slowly and steadily, with its head bobbing slightly. The background is slightly blurred. The people in the background continue to walk, and there is a pigeon in the background. The camera pans slightly to the left as the duck moves, keeping the duck in the center of the frame.\nThen, another identical duck enters the frame from the left and walks in front. On both sides of the duck, casually dressed pedestrians pass by, some wearing jeans and sneakers, some wearing coats and boots. People in the background are still walking, and the camera continues to pan, following the movement of the duck.\nFinally, the camera pans to the left again. The duck on the left side of the screen is blocked by a black column and disappears from the screen.", "events": [ { "event": "The video begins with the camera focused on a wet street. In the background, several people are walking, wearing a variety of clothing, carrying shopping bags and pushing baby strollers. 
The camera remains static, with the perspective low and close to the ground.", "visual_elements": [ { "content": "The video begins with the camera focused on a wet street.", "type": "scene", "weight": 2 }, { "content": "In the background, several people are walking.", "type": "scene", "weight": 2 }, { "content": "People are wearing a variety of clothing.", "type": "attribute", "weight": 1 }, { "content": "Some people are carrying shopping bags, some people are pushing baby strollers.", "type": "action", "weight": 1 }, { "content": "The camera remains static.", "type": "camera", "weight": 3 }, { "content": "The camera angle is low, close to the ground.", "type": "camera", "weight": 3 } ] }, { "event": "Next, a duck enters the frame from the right, slowly walking on the wet ground. The duck has brightly colored feathers, with a dark green head, brown breast, white belly, and orange paws. It walks slowly and steadily, with its head bobbing slightly. The background is slightly blurred. The people in the background continue to walk, and there is a pigeon in the background. The camera pans slightly to the left as the duck moves, keeping the duck in the center of the frame.", "visual_elements": [ { "content": "A duck enters the frame from the right.", "type": "action", "weight": 3 }, { "content": "The duck has a dark green head.", "type": "attribute", "weight": 1 }, { "content": "The duck has a brown breast.", "type": "attribute", "weight": 1 }, { "content": "The duck has a white belly.", "type": "attribute", "weight": 1 }, { "content": "The duck has orange paws.", "type": "attribute", "weight": 1 }, { "content": "The duck walks slowly and steadily, with its head bobbing slightly.", "type": "action", "weight": 2 }, { "content": "The background is slightly blurred.", "type": "scene", "weight": 3 }, { "content": "The people in the background continue to walk.", "type": "scene", "weight": 2 }, { "content": "There is a pigeon in the background.", "type": "scene", "weight": 1 }, { "content": "The camera pans slightly to the left as the duck moves.", "type": "camera", "weight": 3 } ] }, { "event": "Then, another identical duck enters the frame from the left and walks in front. On both sides of the duck, casually dressed pedestrians pass by, some wearing jeans and sneakers, some wearing coats and boots. People in the background are still walking, and the camera continues to pan, following the movement of the duck.", "visual_elements": [ { "content": "Then, another identical duck enters the frame from the left.", "type": "action", "weight": 3 }, { "content": "The duck on the left side of the screen walks in front.", "type": "action", "weight": 3 }, { "content": "There are casually dressed pedestrians passing by on both sides of the duck.", "type": "action", "weight": 2 }, { "content": "The camera continues to pan to the left following the movement of the duck.", "type": "camera", "weight": 3 } ] }, { "event": "Finally, the camera pans to the left again. 
The duck on the left side of the screen is blocked by a black column and disappears from the screen.", "visual_elements": [ { "content": "Finally, the camera pans to the left again.", "type": "scene", "weight": 3 }, { "content": "The duck on the left side of the screen is blocked by a black pillar and disappears from the screen.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 22 }, { "index": "TUNA_0850", "video_path": "MiraData/video_clips/000005028/000005028790.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 20.84, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a black and white dog with white legs and black spots. The dog has a blue leash tied around its neck. The dog sticks out its tongue. The camera is focused on the dog's head from the side, with a green meadow and some withered grass in the background.\nAs the video progresses, the camera follows the dog to the right of the screen, focusing on the dog's head from the side. The leash occasionally disappears from the screen. The dog keeps lowering its head to sniff the ground.\nThe dog's steps are steady, with its nose almost touching the ground. The dog's mouth is slightly open, and its tongue occasionally sticks out to pant. The camera moves to the right, occasionally showing the dog's front legs and part of its body.\nAt the end of the video, the camera still follows the dog's steps, and the grass in the background gradually becomes sparse, and the dirt and pebbles on the ground are clearly visible. The dog raises its head and walks forward vigorously.", "events": [ { "event": "At the beginning of the video, the camera is focused on a black and white dog with white legs and black spots. The dog has a blue leash tied around its neck. The dog sticks out its tongue. The camera is focused on the dog's head from the side, with a green meadow and some withered grass in the background.", "visual_elements": [ { "content": "The camera is focused on a black and white dog.", "type": "camera", "weight": 3 }, { "content": "The dog's legs are white with black spots.", "type": "attribute", "weight": 1 }, { "content": "A blue leash is tied around the dog's neck.", "type": "attribute", "weight": 2 }, { "content": "The dog sticks out its tongue.", "type": "action", "weight": 3 }, { "content": "The camera is focused on the dog's head from the side.", "type": "camera", "weight": 3 }, { "content": "The background is a green meadow and some yellow grass.", "type": "scene", "weight": 3 } ] }, { "event": "As the video progresses, the camera follows the dog to the right of the screen, focusing on the dog's head from the side. The leash occasionally disappears from the screen. The dog keeps lowering its head to sniff the ground.", "visual_elements": [ { "content": "The camera follows the dog to the right of the screen, focusing on the dog's head from the side.", "type": "camera", "weight": 3 }, { "content": "The leash occasionally disappears from the screen.", "type": "camera", "weight": 1 }, { "content": "The dog keeps sniffing the ground.", "type": "action", "weight": 3 } ] }, { "event": "The dog's steps are steady, with its nose almost touching the ground. The dog's mouth is slightly open, and its tongue occasionally sticks out to pant. 
The camera moves to the right, occasionally showing the dog's front legs and part of its body.", "visual_elements": [ { "content": "The dog's steps are steady, with its nose almost touching the ground.", "type": "action", "weight": 3 }, { "content": "The dog's mouth is slightly open, and it occasionally sticks out its tongue to pant.", "type": "action", "weight": 2 }, { "content": "The camera moves to the right, occasionally showing the dog's front legs and part of its body.", "type": "camera", "weight": 2 } ] }, { "event": "At the end of the video, the camera still follows the dog's steps, and the grass in the background gradually becomes sparse, and the dirt and pebbles on the ground are clearly visible. The dog raises its head and walks forward vigorously.", "visual_elements": [ { "content": "At the end of the video, the camera still follows the dog's steps.", "type": "camera", "weight": 3 }, { "content": "The grass in the background gradually becomes sparse.", "type": "scene", "weight": 2 }, { "content": "The dirt and pebbles on the ground appear in the background.", "type": "scene", "weight": 1 }, { "content": "The dog raises its head and walks forward vigorously.", "type": "action", "weight": 3 } ] } ], "n_events": 4, "n_elements": 16 }, { "index": "TUNA_0851", "video_path": "MiraData/video_clips/000005028/000005028811.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 22.0, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a body of water. The water surface is sparkling, and three white seagulls are swimming on it. On the right side of the screen is a concrete platform with a row of black metal railings on the edge of the platform. A row of seagulls are standing on the railings, and one of them is a larger, darker seagull standing at the outermost end of the railings. Other smaller seagulls are standing in different positions on the railings. The seagulls have black tails and white bodies.\nFrom the middle of the video to the end, the camera moves slightly to the left. A seagull in the middle of the group flies up from the water and flies to the left side of the screen. Another seagull on the right side of the screen also flies to the left side of the screen. The seagulls on the railing remain in place, and occasionally a few seagulls lower their heads or turn their heads to preen their feathers. In the background, ripples appear on the water as the seagulls fly away.", "events": [ { "event": "At the beginning of the video, the camera is focused on a body of water. The water surface is sparkling, and three white seagulls are swimming on it. On the right side of the screen is a concrete platform with a row of black metal railings on the edge of the platform. A row of seagulls are standing on the railings, and one of them is a larger, darker seagull standing at the outermost end of the railings. Other smaller seagulls are standing in different positions on the railings. 
The seagulls have black tails and white bodies.", "visual_elements": [ { "content": "The camera is pointed at a sparkling water surface.", "type": "camera", "weight": 3 }, { "content": "There are three white seagulls swimming on the water surface.", "type": "action", "weight": 3 }, { "content": "On the right side of the picture is a cement platform.", "type": "scene", "weight": 3 }, { "content": "There is a row of black metal railings on the edge of the platform.", "type": "scene", "weight": 3 }, { "content": "A row of seagulls stand on the railings.", "type": "attribute", "weight": 3 }, { "content": "A larger and darker seagull stands at the outermost end of the railings.", "type": "attribute", "weight": 2 }, { "content": "Several other smaller seagulls stand in different positions on the railings.", "type": "attribute", "weight": 2 }, { "content": "The seagulls have black tails and white bodies.", "type": "attribute", "weight": 1 } ] }, { "event": "From the middle of the video to the end, the camera moves slightly to the left. A seagull in the middle of the group flies up from the water and flies to the left side of the screen. Another seagull on the right side of the screen also flies to the left side of the screen. The seagulls on the railing remain in place, and occasionally a few seagulls lower their heads or turn their heads to preen their feathers. In the background, ripples appear on the water as the seagulls fly away.", "visual_elements": [ { "content": "The camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "A seagull in the middle of the group flies up from the water and flies to the left side of the picture.", "type": "action", "weight": 2 }, { "content": "A seagull on the right side of the picture also flies to the left side of the picture.", "type": "action", "weight": 2 }, { "content": "Ripples appear on the water surface in the background as the seagull flies away.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0852", "video_path": "MiraData/video_clips/000005028/000005028824.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 25.24, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a rabbit. The rabbit is eating grass with its head down. The rabbit's fur is a mixture of brown and light brown, with subtle black spots on its body. It has long, erect ears, large, dark eyes, and very soft fur. The rabbit's fur and grass are gently blown by the wind. The background is a blurry green grass.\nAs time passes, the camera moves slightly to the left, and the rabbit's body moves with it. Then, the rabbit suddenly stops eating grass and raises its head. The rabbit's eyes open wider and its body leans forward slightly. The rabbit's face is more clearly visible in the picture, and its nose and whiskers tremble slightly.", "events": [ { "event": "The video begins with the camera focusing on a rabbit. The rabbit is eating grass with its head down. The rabbit's fur is a mixture of brown and light brown, with subtle black spots on its body. It has long, erect ears, large, dark eyes, and very soft fur. The rabbit's fur and grass are gently blown by the wind. 
The background is a blurry green grass.", "visual_elements": [ { "content": "The camera is focused on a rabbit.", "type": "camera", "weight": 3 }, { "content": "The rabbit is eating grass with its head down.", "type": "action", "weight": 3 }, { "content": "The rabbit's fur is a mixture of brown and light brown, with subtle black spots on its body.", "type": "attribute", "weight": 3 }, { "content": "The rabbit's ears are long and upright, and its eyes are large and bright.", "type": "attribute", "weight": 2 }, { "content": "The rabbit's fur is very soft.", "type": "attribute", "weight": 1 }, { "content": "The rabbit's fur and grass are gently blown by the wind.", "type": "action", "weight": 1 }, { "content": "The background is a blurry green grass.", "type": "scene", "weight": 3 } ] }, { "event": "As time passes, the camera moves slightly to the left, and the rabbit's body moves with it. Then, the rabbit suddenly stops eating grass and raises its head. The rabbit's eyes open wider and its body leans forward slightly. The rabbit's face is more clearly visible in the picture, and its nose and whiskers tremble slightly.", "visual_elements": [ { "content": "As time goes by, the camera moves slightly to the left.", "type": "camera", "weight": 3 }, { "content": "The rabbit's body also moves to the left of the frame.", "type": "action", "weight": 3 }, { "content": "The rabbit suddenly stops eating grass and raises its head.", "type": "action", "weight": 3 }, { "content": "The rabbit's eyes open wider and its body leans forward slightly.", "type": "action", "weight": 1 }, { "content": "The rabbit's face is clearly visible, with its nose and whiskers trembling slightly.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0853", "video_path": "MiraData/video_clips/000005028/000005028913.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 29.74, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera looks down from a high place, shooting a clear blue-green seawater. There is a large marine creature moving slowly in the picture. It is huge, smooth and has a black shadow. As the creature moves, ripples and foam appear on the water surface.\nAs the camera moves to the right, the black shadow gradually becomes clear, and it is a huge whale. The whale moves slowly, sometimes showing its head, spraying water mist, forming a small water column, and then sinking into the water. Sometimes it swings its fins in the water, stirring up waves of water.\nThe camera continues to move to the upper right, and the head and back of the whale gradually become clear, with some white spots on the head.\nFinally, the camera stabilizes. After swimming in the water for a while, the whale begins to dive slowly, and the black shadow gradually disappears in the water. The water surface gradually calms down, leaving only slight ripples and a few circles of ripples. In the background, the color of the water gradually changes from dark blue to light green under the light.", "events": [ { "event": "At the beginning of the video, the camera looks down from a high place, shooting a clear blue-green seawater. There is a large marine creature moving slowly in the picture. It is huge, smooth and has a black shadow. 
As the creature moves, ripples and foam appear on the water surface.", "visual_elements": [ { "content": "At the beginning of the video, the camera looks down from a high altitude.", "type": "camera", "weight": 3 }, { "content": "The camera captures a clear blue-green seawater.", "type": "camera", "weight": 3 }, { "content": "There is a large marine creature moving slowly in the center of the screen.", "type": "action", "weight": 3 }, { "content": "The creature is huge, with a smooth surface and a black shadow.", "type": "attribute", "weight": 3 }, { "content": "As the creature moves, ripples and bubbles appear on the water surface.", "type": "scene", "weight": 2 } ] }, { "event": "As the camera moves to the right, the black shadow gradually becomes clear, and it is a huge whale. The whale moves slowly, sometimes showing its head, spraying water mist, forming a small water column, and then sinking into the water. Sometimes it swings its fins in the water, stirring up waves of water.", "visual_elements": [ { "content": "As the camera moves to the right, the black shadow gradually becomes clear.", "type": "camera", "weight": 3 }, { "content": "The black shadow is a whale.", "type": "attribute", "weight": 3 }, { "content": "The whale moves slowly, sometimes showing its head, spraying water mist, forming a small water column, and then sinking into the water.", "type": "action", "weight": 2 }, { "content": "The whale sometimes swings its fins in the water, stirring up waves of water.", "type": "action", "weight": 2 } ] }, { "event": "The camera continues to move to the upper right, and the head and back of the whale gradually become clear, with some white spots on the head.", "visual_elements": [ { "content": "The camera continues to move to the upper right, and the whale's head and back gradually become clear.", "type": "camera", "weight": 2 }, { "content": "There are some white spots on the whale's head.", "type": "attribute", "weight": 1 } ] }, { "event": "Finally, the camera stabilizes. After swimming in the water for a while, the whale begins to dive slowly, and the black shadow gradually disappears in the water. The water surface gradually calms down, leaving only slight ripples and a few circles of ripples. In the background, the color of the water gradually changes from dark blue to light green under the light.", "visual_elements": [ { "content": "Finally, the camera stabilizes.", "type": "camera", "weight": 3 }, { "content": "After swimming in the water for a while, the whale begins to slowly dive.", "type": "action", "weight": 2 }, { "content": "The black shadow gradually disappears in the water.", "type": "attribute", "weight": 2 }, { "content": "The water surface gradually calms down, leaving only slight ripples and a few circles of ripples.", "type": "scene", "weight": 2 }, { "content": "In the background, the color of the water changes from dark blue to light green under the light.", "type": "scene", "weight": 3 } ] } ], "n_events": 4, "n_elements": 16 }, { "index": "TUNA_0854", "video_path": "MiraData/video_clips/000005028/000005028968.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 20.72, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a row of pigeons perched on a stone wall. The wall is slightly old, with some green moss on the surface. Some pigeons lower their heads, some raise their heads, and occasionally a few pigeons move in place. 
The pigeons' feathers are a mixture of gray and black. Some pigeons have obvious purple and green on their chests, and their feet are orange-red. The background is a classic white building. The building's window frames are white, with light green curtains inside. There are several flags on the building fluttering in the wind.\nAs the video progresses, the camera slowly moves to the left, and more pigeons enter the frame. Most of these pigeons are gray and white, and some have black spots. The pigeons stand on the stone wall and look at different places.\nThen, as the camera moves to the left side of the frame, the pigeons begin to become more active. The pigeons continue to stand or move on the stone wall. Several of the pigeons are close to each other and interacting. Trees with small green leaves appear in the background.", "events": [ { "event": "At the beginning of the video, the camera is focused on a row of pigeons perched on a stone wall. The wall is slightly old, with some green moss on the surface. Some pigeons lower their heads, some raise their heads, and occasionally a few pigeons move in place. The pigeons' feathers are a mixture of gray and black. Some pigeons have obvious purple and green on their chests, and their feet are orange-red. The background is a classic white building. The building's window frames are white, with light green curtains inside. There are several flags on the building fluttering in the wind.", "visual_elements": [ { "content": "The camera is pointed at a row of pigeons perched on a stone wall.", "type": "camera", "weight": 3 }, { "content": "The wall is slightly old, with some green moss on the surface.", "type": "scene", "weight": 3 }, { "content": "Some of the pigeons lower their heads, some raise their heads, and occasionally a few pigeons move in place.", "type": "action", "weight": 2 }, { "content": "The pigeons' feathers are a mixture of gray and black.", "type": "attribute", "weight": 3 }, { "content": "Some pigeons have obvious purple and green on their chests, and their feet are orange-red.", "type": "attribute", "weight": 2 }, { "content": "The background is a classic white building.", "type": "scene", "weight": 2 }, { "content": "The building's window frames are white, with light green curtains inside.", "type": "scene", "weight": 1 }, { "content": "There are several flags on the building fluttering in the wind.", "type": "scene", "weight": 1 } ] }, { "event": "As the video progresses, the camera slowly moves to the left, and more pigeons enter the frame. Most of these pigeons are gray and white, and some have black spots. The pigeons stand on the stone wall and look at different places.", "visual_elements": [ { "content": "As the video progresses, the camera slowly moves to the left, and more pigeons enter the picture.", "type": "camera", "weight": 3 }, { "content": "Most of these pigeons are grayish white, and some have black spots.", "type": "attribute", "weight": 2 }, { "content": "The pigeons stand on the stone wall and look at different places.", "type": "action", "weight": 1 } ] }, { "event": "Then, as the camera moves to the left side of the frame, the pigeons begin to become more active. The pigeons continue to stand or move on the stone wall. Several of the pigeons are close to each other and interacting. 
Trees with small green leaves appear in the background.", "visual_elements": [ { "content": "The camera then moves to the left side of the picture.", "type": "camera", "weight": 3 }, { "content": "The pigeons continue to stand or move on the stone wall.", "type": "action", "weight": 2 }, { "content": "Trees with small green leaves appear in the background.", "type": "scene", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0855", "video_path": "MiraData/video_clips/000005028/000005028973.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 23.28, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is pointed at a slightly old stone wall with several pigeons on it. The pigeons are walking on the wall, some lowering their heads to peck at food, while others stand still. The pigeons' feathers are a mixture of gray and black, and their feet are orange-red. The pigeons are medium-sized and agile. The background is a classical white building with solemn white stone columns and arches. There are trees with tender green leaves on both sides of the building.\nThe camera moves to the right, and the number of pigeons gradually increases. Some of the pigeons spread their wings and fly away. In the background, there are stone carvings on the towering columns.\nAs the video draws to a close, a few pigeons take off again and fly to the left of the camera. The remaining pigeons continue to move around on the wall, some lowering their heads to peck at food, while others stand still. Tree branches with tiny green leaves appear in front of the stone wall on the right side of the camera.", "events": [ { "event": "At the beginning of the video, the camera is pointed at a slightly old stone wall with several pigeons on it. The pigeons are walking on the wall, some lowering their heads to peck at food, while others stand still. The pigeons' feathers are a mixture of gray and black, and their feet are orange-red. The pigeons are medium-sized and agile. The background is a classical white building with solemn white stone columns and arches. There are trees with tender green leaves on both sides of the building.", "visual_elements": [ { "content": "The camera is pointed at a slightly old stone wall.", "type": "camera", "weight": 3 }, { "content": "There are several pigeons on the wall.", "type": "attribute", "weight": 3 }, { "content": "The pigeons are walking on the wall, some are pecking at food with their heads down, and some are standing still.", "type": "action", "weight": 3 }, { "content": "The pigeons' feathers are a mixture of gray and black, and their feet are orange-red.", "type": "attribute", "weight": 2 }, { "content": "The pigeons are of medium size and agile.", "type": "attribute", "weight": 1 }, { "content": "The background is a white building in classical style.", "type": "scene", "weight": 3 }, { "content": "The building has solemn white stone columns and arches.", "type": "scene", "weight": 3 }, { "content": "There are trees with tender green leaves on both sides of the building.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves to the right, and the number of pigeons gradually increases. Some of the pigeons spread their wings and fly away. 
In the background, there are stone carvings on the towering columns.", "visual_elements": [ { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The number of pigeons gradually increases.", "type": "attribute", "weight": 3 }, { "content": "Some pigeons spread their wings and fly away.", "type": "action", "weight": 2 }, { "content": "In the background, there are stone carvings on the towering columns.", "type": "scene", "weight": 2 } ] }, { "event": "As the video draws to a close, a few pigeons take off again and fly to the left of the camera. The remaining pigeons continue to move around on the wall, some lowering their heads to peck at food, while others stand still. Tree branches with tiny green leaves appear in front of the stone wall on the right side of the camera.", "visual_elements": [ { "content": "Towards the end of the video, several pigeons fly up again and fly to the left side of the camera.", "type": "action", "weight": 3 }, { "content": "The remaining pigeons continue to move on the wall, some are pecking at food with their heads down, and some are standing still.", "type": "action", "weight": 2 }, { "content": "The branches of trees with small green leaves appear in front of the stone wall on the right side of the camera.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0856", "video_path": "MiraData/video_clips/000005046/000005046470.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 25.88, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a dense green bush with heavy foliage obscuring most of the view. The camera focuses on the foliage of the bush, which is dimly lit.\nThe camera then moves slowly to the left, gradually revealing the edge of the bush. The grassy area is covered with yellowish grass and appears somewhat desolate. A few bare trees are in the distance with no leaves on their branches.\nThe camera eventually stops at this wide-open vista. Trees line up in the distance.", "events": [ { "event": "The video begins with a dense green bush with heavy foliage obscuring most of the view. The camera focuses on the foliage of the bush, which is dimly lit.", "visual_elements": [ { "content": "The scene is a thick green bush.", "type": "scene", "weight": 3 }, { "content": "The foliage is so thick that it obscures most of the view.", "type": "attribute", "weight": 2 }, { "content": "The camera focuses on the foliage of the bush.", "type": "camera", "weight": 3 }, { "content": "The light is dim.", "type": "scene", "weight": 2 } ] }, { "event": "The camera then moves slowly to the left, gradually revealing the edge of the bush. The grassy area is covered with yellowish grass and appears somewhat desolate. A few bare trees are in the distance with no leaves on their branches.", "visual_elements": [ { "content": "The camera then moves slowly to the left", "type": "camera", "weight": 3 }, { "content": "The grassy area is covered with yellowish grass and appears somewhat desolate.", "type": "attribute", "weight": 2 }, { "content": "A few bare trees are in the distance with no leaves on their branches.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera eventually stops at this wide-open vista. 
Trees line up in the distance.", "visual_elements": [ { "content": "The camera eventually stops at this wide-open vista.", "type": "camera", "weight": 3 }, { "content": "Trees line up in the distance.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 9 }, { "index": "TUNA_0857", "video_path": "MiraData/video_clips/000005046/000005046516.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 24.84, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a blurry green background. The camera moves to the right, the image becomes clearer, the light becomes brighter, and the focus is on the leaves of the plant, which have clearly visible veins.\nThen the camera is fixed in its current position and the light shines on the leaves as the light continues to change.", "events": [ { "event": "The video begins with the camera focusing on a blurry green background. The camera moves to the right, the image becomes clearer, the light becomes brighter, and the focus is on the leaves of the plant, which have clearly visible veins.", "visual_elements": [ { "content": "The camera focuses on a blurry green background.", "type": "camera", "weight": 3 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The image becomes clearer.", "type": "scene", "weight": 2 }, { "content": "The light becomes brighter.", "type": "scene", "weight": 2 }, { "content": "The focus is on the leaves of the plant.", "type": "attribute", "weight": 3 }, { "content": "The leaves of the plant have clearly visible veins.", "type": "attribute", "weight": 2 } ] }, { "event": "Then the camera is fixed in its current position and the light shines on the leaves as the light continues to change.", "visual_elements": [ { "content": "The camera is fixed in its current position.", "type": "camera", "weight": 3 }, { "content": "The light shines on the leaves.", "type": "attribute", "weight": 2 }, { "content": "The light continues to change.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0858", "video_path": "MiraData/video_clips/000005046/000005046526.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 20.76, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is pointed at the ground from a low angle. There is moist dark brown soil on the ground, mixed with some plant debris and small pieces of wood chips. The background is a yellow wooden floor. The picture is slightly blurred and then gradually becomes clear.\nAs the camera pans to the right, a small black beetle appears in the frame, slowly crawling in from the right side of the frame, through the rotting wood chips and dirt.\nNext, the camera quickly moves to the right. The ground in the picture is wider, with more rotten wood chips and soil on the ground. At this time, another black beetle appears in the picture, enters the picture from the right, and continues to crawl to the right front.\nAt the end of the video, the two beetles disappear from the top of the screen.", "events": [ { "event": "At the beginning of the video, the camera is pointed at the ground from a low angle. There is moist dark brown soil on the ground, mixed with some plant debris and small pieces of wood chips. The background is a yellow wooden floor. 
The picture is slightly blurred and then gradually becomes clear.", "visual_elements": [ { "content": "The video starts with the camera pointing at the ground from a low angle.", "type": "camera", "weight": 3 }, { "content": "There is moist dark brown soil on the ground.", "type": "scene", "weight": 3 }, { "content": "There are some plant debris and small pieces of sawdust in the soil.", "type": "scene", "weight": 2 }, { "content": "The background is a yellow wooden floor.", "type": "scene", "weight": 2 }, { "content": "The picture is slightly blurred, and then gradually becomes clear.", "type": "camera", "weight": 3 } ] }, { "event": "As the camera pans to the right, a small black beetle appears in the frame, slowly crawling in from the right side of the frame, through the rotting wood chips and dirt.", "visual_elements": [ { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "A small black beetle appears in the picture.", "type": "attribute", "weight": 3 }, { "content": "The beetle slowly crawls in from the right side of the picture, through the rotten sawdust and soil.", "type": "action", "weight": 3 } ] }, { "event": "Next, the camera quickly moves to the right. The ground in the picture is wider, with more rotten wood chips and soil on the ground. At this time, another black beetle appears in the picture, enters the picture from the right, and continues to crawl to the right front.", "visual_elements": [ { "content": "The camera then moves quickly to the right.", "type": "camera", "weight": 3 }, { "content": "The ground in the picture is wider, with more rotten sawdust and soil on the ground.", "type": "scene", "weight": 2 }, { "content": "At this time, another black beetle appears in the picture.", "type": "attribute", "weight": 3 }, { "content": "The beetle enters the picture from the right.", "type": "action", "weight": 3 }, { "content": "The beetle crawls to the right front.", "type": "action", "weight": 2 } ] }, { "event": "At the end of the video, the two beetles disappear from the top of the screen.", "visual_elements": [ { "content": "At the end of the video, the two beetles disappear at the top of the picture.", "type": "attribute", "weight": 3 } ] } ], "n_events": 4, "n_elements": 14 }, { "index": "TUNA_0859", "video_path": "MiraData/video_clips/000005046/000005046561.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 25.47, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a blurry dark green background, and the camera moves down to reveal a light green meadow and a sea of red flowers.\nThe camera fixes on a sea of red flowers with unopened buds and blooming brightly colored flowers swaying in the wind.", "events": [ { "event": "The video begins with a blurry dark green background, and the camera moves down to reveal a light green meadow and a sea of red flowers.", "visual_elements": [ { "content": "The frame is a blurry dark green background", "type": "scene", "weight": 3 }, { "content": "The camera moves down to reveal a light green meadow and a sea of red flowers.", "type": "camera", "weight": 3 } ] }, { "event": "The camera fixes on a sea of red flowers with unopened buds and blooming brightly colored flowers swaying in the wind.", "visual_elements": [ { "content": "The camera fixes on a sea of red flowers.", "type": "camera", "weight": 3 }, { "content": "Unopened buds and blooming brightly colored flowers are swaying in the wind.", "type": 
"attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 4 }, { "index": "TUNA_0860", "video_path": "MiraData/video_clips/000005046/000005046579.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 20.52, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a green leaf, with veins and surface textures clearly visible. As the camera gradually pulls back, the overall shape of the leaf begins to emerge, with parts of the leaf gradually becoming blurred from back to front.", "events": [ { "event": "The video begins with the camera focusing on a green leaf, with veins and surface textures clearly visible. As the camera gradually pulls back, the overall shape of the leaf begins to emerge, with parts of the leaf gradually becoming blurred from back to front.", "visual_elements": [ { "content": "The camera focusing on a green leaf.", "type": "camera", "weight": 3 }, { "content": "The veins and surface textures are clearly visible.", "type": "attribute", "weight": 2 }, { "content": "As the camera gradually pulls back.", "type": "camera", "weight": 3 }, { "content": "The overall shape of the leaf begins to emerge.", "type": "attribute", "weight": 2 }, { "content": "Parts of the leaf gradually becoming blurred from back to front.", "type": "attribute", "weight": 2 } ] } ], "n_events": 1, "n_elements": 5 }, { "index": "TUNA_0861", "video_path": "MiraData/video_clips/000005046/000005046599.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 22.8, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a field with obvious furrows in the ground. Soil and grass are staggered. The camera moves upward to reveal more views of the field, which is planted with neatly arranged green crops.\nThe camera continues to move upwards and the view gradually widens as more fields and distant objects enter the frame. To the right of the field is a vast area of green crops, while to the left is a dirt path with a few scattered patches of grass. A mound of piled up dirt appears in the distance and a wooded area can be had to in the background.\nThe camera stops moving, and a clear sky is in the upper part of the frame, with piles of dirt and rows of trees, and an expansive field in the lower part of the frame.", "events": [ { "event": "The video begins with a field with obvious furrows in the ground. Soil and grass are staggered. The camera moves upward to reveal more views of the field, which is planted with neatly arranged green crops.", "visual_elements": [ { "content": "The image is of a field.", "type": "scene", "weight": 2 }, { "content": "There are obvious furrows in the ground.", "type": "attribute", "weight": 2 }, { "content": "Soil and grass are staggered.", "type": "scene", "weight": 2 }, { "content": "The camera moves upward to reveal more views of the field.", "type": "camera", "weight": 3 }, { "content": "The field is planted with neatly arranged green crops.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move upwards and the view gradually widens as more fields and distant objects enter the frame. To the right of the field is a vast area of green crops, while to the left is a dirt path with a few scattered patches of grass. 
A mound of piled up dirt appears in the distance and a wooded area can be seen in the background.", "visual_elements": [ { "content": "The camera continues to move upwards.", "type": "camera", "weight": 3 }, { "content": "The view gradually widens.", "type": "scene", "weight": 2 }, { "content": "Fields and distant objects enter the frame.", "type": "scene", "weight": 2 }, { "content": "To the right of the field is a vast area of green crops.", "type": "attribute", "weight": 2 }, { "content": "A pile of mounded earth appears in the distance.", "type": "scene", "weight": 2 }, { "content": "Next to the mound are some bushes.", "type": "attribute", "weight": 2 }, { "content": "There's a forest in the background.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera stops moving, and a clear sky is in the upper part of the frame, with piles of dirt and rows of trees, and an expansive field in the lower part of the frame.", "visual_elements": [ { "content": "The camera stops moving.", "type": "camera", "weight": 3 }, { "content": "A clear sky is in the upper part of the frame.", "type": "scene", "weight": 3 }, { "content": "Piles of dirt, rows of trees, and an expansive field are in the lower part of the frame.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0862", "video_path": "MiraData/video_clips/000005046/000005046617.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 29.17, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on some green plant leaves and black gravel on the ground. The edges of the leaves are slightly curled and have water droplets on the surface. There are branches interspersed between the leaves.\nThe camera slowly moves to the left, gradually approaching the area under a leaf. Then a beetle slowly crawls out from under the leaf, with a smooth and shiny body. The beetle has slender antennae and legs.\nIn the middle of the video, the camera sometimes zooms in and sometimes zooms out. The beetle continues to shuttle between leaves and branches, and gradually disappears from the screen, leaving only a black corner.\nAt the end of the video, the camera gradually zooms out, and the beetle disappears from the screen again. The picture becomes blurred, and then a text appears, first showing the words \"Nature Clip\" and then changing to \"Subscribe\", with a light blue sky in the background.\nThe video ends, and the word \"Subscribe\" is enlarged and close to the camera.", "events": [ { "event": "At the beginning of the video, the camera is focused on some green plant leaves and black gravel on the ground. The edges of the leaves are slightly curled and have water droplets on the surface. There are branches interspersed between the leaves.", "visual_elements": [ { "content": "The camera is focused on some green leaves and black gravel on the ground.", "type": "camera", "weight": 3 }, { "content": "The edges of the leaves are slightly curled, with water droplets on the surface.", "type": "scene", "weight": 1 }, { "content": "There are branches interspersed between the leaves.", "type": "scene", "weight": 2 } ] }, { "event": "The camera slowly moves to the left, gradually approaching the area under a leaf. Then a beetle slowly crawls out from under the leaf, with a smooth and shiny body. 
The beetle has slender antennae and legs.", "visual_elements": [ { "content": "The camera moves slowly to the left.", "type": "camera", "weight": 3 }, { "content": "The camera gradually approaches the area under a leaf.", "type": "camera", "weight": 3 }, { "content": "Then, the camera focuses on a black beetle.", "type": "camera", "weight": 3 }, { "content": "The beetle slowly crawls out from under the leaf and crawls to the left side of the screen.", "type": "action", "weight": 3 }, { "content": "The beetle's body is smooth and shiny.", "type": "attribute", "weight": 1 } ] }, { "event": "In the middle of the video, the camera sometimes zooms in and sometimes zooms out. The beetle continues to shuttle between leaves and branches, and gradually disappears from the screen, leaving only a black corner.", "visual_elements": [ { "content": "The camera sometimes zooms in and sometimes zooms out.", "type": "camera", "weight": 3 }, { "content": "The beetle continues to shuttle between leaves and branches.", "type": "action", "weight": 3 }, { "content": "The camera always keeps tracking the beetle.", "type": "camera", "weight": 3 } ] }, { "event": "At the end of the video, the camera gradually zooms out, and the beetle disappears from the screen again. The picture becomes blurred, and then a text appears, first showing the words \"Nature Clip\" and then changing to \"Subscribe\", with a light blue sky in the background.", "visual_elements": [ { "content": "At the end of the video, the camera gradually zooms out.", "type": "camera", "weight": 3 }, { "content": "The beetle disappears from the screen again.", "type": "attribute", "weight": 3 }, { "content": "The screen becomes blurred, with a light blue sky in the background.", "type": "scene", "weight": 3 }, { "content": "Then a text appears, first showing the words \"Nature Clip\" and then changing to \"Subscribe\".", "type": "action", "weight": 2 } ] }, { "event": "The video ends, and the word \"Subscribe\" is enlarged and close to the camera.", "visual_elements": [ { "content": "The video ends, and the word \"Subscribe\" is enlarged and close to the camera.", "type": "camera", "weight": 3 } ] } ], "n_events": 5, "n_elements": 16 }, { "index": "TUNA_0863", "video_path": "MiraData/video_clips/000005046/000005046720.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 25.24, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a pond with yellowish reeds and weeds growing around it, surrounded by muddy dirt. The camera moves to the right and a pile of weeds appears in the frame.\nThe camera continues to move to the right and a tree stump appears in the frame, with a red cylindrical uncovered box on top of the tree-like shape and a yellow object hanging from the stump by a string.", "events": [ { "event": "The video begins with the camera focusing on a pond with yellowish reeds and weeds growing around it, surrounded by muddy dirt. 
The camera moves to the right and a pile of weeds appears in the frame.", "visual_elements": [ { "content": "The camera focuses on a pond.", "type": "camera", "weight": 3 }, { "content": "The pond is surrounded by yellow reeds and weeds.", "type": "scene", "weight": 2 }, { "content": "It is surrounded by muddy soil.", "type": "scene", "weight": 2 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "A pile of weeds appears in the frame.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move to the right and a tree stump appears in the frame, with a red cylindrical uncovered box on top of the tree-like shape and a yellow object hanging from the stump by a string.", "visual_elements": [ { "content": "The camera continues to move to the right.", "type": "camera", "weight": 3 }, { "content": "A tree stump appears in the frame.", "type": "scene", "weight": 3 }, { "content": "On top of the stump is a red cylindrical box without a lid.", "type": "attribute", "weight": 3 }, { "content": "A yellow object hangs from the stump by a rope.", "type": "attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0864", "video_path": "MiraData/video_clips/000005046/000005046877.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 21.12, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a green leaf, the background is blurred, the camera remains still and the camera focuses on the leaf.\nThe camera gradually moves to the right and zooms in, the other side of the leaf enters the frame, the leaf shakes in the wind and the background is blurred.", "events": [ { "event": "The video begins with the camera focusing on a green leaf, the background is blurred, the camera remains still and the camera focuses on the leaf.", "visual_elements": [ { "content": "The camera focusing on a green leaf.", "type": "camera", "weight": 3 }, { "content": "The background is blurred.", "type": "scene", "weight": 2 }, { "content": "The camera remains still.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the leaf.", "type": "camera", "weight": 3 } ] }, { "event": "The camera gradually moves to the right and zooms in, the other side of the leaf enters the frame, the leaf shakes in the wind and the background is blurred.", "visual_elements": [ { "content": "The camera gradually moves to the right and zooms in.", "type": "camera", "weight": 3 }, { "content": "The other side of the leaf enters the frame.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 6 }, { "index": "TUNA_0865", "video_path": "MiraData/video_clips/000005046/000005046922.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 29.56, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera looking down on a grove of coconut trees from a high altitude. The camera slowly moves forward and in the distance a blue sea with waves gently lapping against the coastline.\nThe camera continues to move forward with a panoramic view of the entire coconut grove and coastline with houses and animals on the ground. 
The sea is calm and vast in the distance, the skyline meets the sea level and there are a few white clouds dotting the sky.\nFinally, the camera stays in an overhead perspective, showing the vast expanse of ocean and blue sky.", "events": [ { "event": "The video begins with the camera looking down on a grove of coconut trees from a high altitude. The camera slowly moves forward and in the distance a blue sea with waves gently lapping against the coastline.", "visual_elements": [ { "content": "The camera looking down on a grove of coconut trees from a high altitude.", "type": "camera", "weight": 3 }, { "content": "The sun is shining and the trees are dappled.", "type": "scene", "weight": 1 }, { "content": "The camera slowly moves forward.", "type": "camera", "weight": 3 }, { "content": "In the distance there was a blue sea.", "type": "scene", "weight": 2 }, { "content": "Waves gently lapping against the coastline.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move forward with a panoramic view of the entire coconut grove and coastline with houses and animals on the ground. The sea is calm and vast in the distance, the skyline meets the sea level and there are a few white clouds dotting the sky.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "Panoramic view of the entire coconut grove and coastline.", "type": "scene", "weight": 2 }, { "content": "There are houses and animals on the ground.", "type": "scene", "weight": 1 }, { "content": "The sea is calm and vast in the distance.", "type": "scene", "weight": 2 }, { "content": "The skyline meets the sea level.", "type": "attribute", "weight": 2 }, { "content": "There are a few white clouds dotting the sky.", "type": "attribute", "weight": 2 } ] }, { "event": "Finally, the camera stays in an overhead perspective, showing the vast expanse of ocean and blue sky.", "visual_elements": [ { "content": "The camera stays in an overhead perspective.", "type": "camera", "weight": 3 }, { "content": "The camera shows the vast expanse of ocean and blue sky.", "type": "scene", "weight": 3 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0866", "video_path": "MiraData/video_clips/000005047/000005047008.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 25.96, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a pink flower and a bee stops to collect nectar from the flower, the background is blurred. The bee stays on the flower for a moment. The bee flies away from the top side of the frame. The camera moves up and the bee flies to the flower above for honey.\nThe camera stays fixed and the video ends with the bee flying away from the flower out of the frame and the branch gently swaying.", "events": [ { "event": "At the beginning of the video, the camera focuses on a pink flower and a bee stops to collect nectar from the flower, the background is blurred. The bee stays on the flower for a moment. The bee flies away from the top side of the frame. 
The camera moves up and the bee flies to the flower above for honey.", "visual_elements": [ { "content": "The camera focuses on a pink flower.", "type": "camera", "weight": 3 }, { "content": "A bee stops to collect nectar from the flower.", "type": "action", "weight": 3 }, { "content": "The background is blurred.", "type": "scene", "weight": 2 }, { "content": "The bee stays on the flower for a moment.", "type": "action", "weight": 3 }, { "content": "The bee flies away from the top side of the frame.", "type": "action", "weight": 3 }, { "content": "The camera moves up.", "type": "camera", "weight": 3 }, { "content": "The bee flies to the flower above for honey.", "type": "action", "weight": 3 } ] }, { "event": "The camera stays fixed and the video ends with the bee flying away from the flower out of the frame and the branch gently swaying.", "visual_elements": [ { "content": "The camera stays fixed.", "type": "camera", "weight": 3 }, { "content": "The video ends with the bee flying away from the flower out of the frame.", "type": "action", "weight": 3 }, { "content": "The branch sways gently.", "type": "attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0867", "video_path": "MiraData/video_clips/000005047/000005047020.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 22.09, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera rises slowly from the ground of a coconut grove, and the perspective gradually expands. The coconut trees are tall and straight, casting long shadows on the ground. In the background is the sea, with waves gently lapping at the shore. As the camera rises, more coconut trees come into view.\nThe camera continues to rise and the view gradually shifts from the ground to the air, overlooking the entire coconut grove and the coastline. A few thatched huts can be seen along the shoreline now, with a few figures moving around next to them. The sea water is crystal clear.\nAt the end of the video, the camera stays high in the air, overlooking the entire coastline and coconut grove.", "events": [ { "event": "At the beginning of the video, the camera rises slowly from the ground of a coconut grove, and the perspective gradually expands. The coconut trees are tall and straight, casting long shadows on the ground. In the background is the sea, with waves gently lapping at the shore. As the camera rises, more coconut trees come into view.", "visual_elements": [ { "content": "The camera rises slowly from the ground of a coconut grove.", "type": "camera", "weight": 3 }, { "content": "The perspective gradually expands.", "type": "scene", "weight": 2 }, { "content": "The coconut trees are tall and straight.", "type": "scene", "weight": 2 }, { "content": "The coconut trees cast long shadows on the ground.", "type": "attribute", "weight": 2 }, { "content": "In the background is the sea.", "type": "scene", "weight": 2 }, { "content": "Waves gently lap at the shore.", "type": "attribute", "weight": 2 }, { "content": "As the camera rises, more coconut trees come into view.", "type": "camera", "weight": 3 } ] }, { "event": "The camera continues to rise and the view gradually shifts from the ground to the air, overlooking the entire coconut grove and the coastline. A few thatched huts can be seen along the shoreline now, with a few figures moving around next to them. 
The sea water is crystal clear.", "visual_elements": [ { "content": "The camera continues to rise and the view gradually shifts from the ground to the air.", "type": "camera", "weight": 3 }, { "content": "Overlooking the entire coconut grove and the coastline.", "type": "scene", "weight": 3 }, { "content": "There are a few thatched cottages along the coast.", "type": "attribute", "weight": 2 }, { "content": "A few figures moving around.", "type": "attribute", "weight": 2 }, { "content": "The sea is crystal clear.", "type": "attribute", "weight": 1 } ] }, { "event": "At the end of the video, the camera stays high in the air, overlooking the entire coastline and coconut grove.", "visual_elements": [ { "content": "The camera stays high in the air.", "type": "camera", "weight": 3 }, { "content": "The camera is overlooking the entire coastline and coconut grove.", "type": "scene", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0868", "video_path": "MiraData/video_clips/000005047/000005047099.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 23.2, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a blurry background. The camera gradually moves to the left and some purple flowers appear.\nThe camera continues to move to the left and the flowers gradually leave the frame and the image becomes blurred.", "events": [ { "event": "The video begins with the camera focusing on a blurry background. The camera gradually moves to the left and some purple flowers appear.", "visual_elements": [ { "content": "The camera focusing on a blurry background.", "type": "camera", "weight": 3 }, { "content": "The camera gradually moves to the left.", "type": "camera", "weight": 3 }, { "content": "The camera gradually moves to the left and some purple flowers appear.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move to the left and the flowers gradually leave the frame and the image becomes blurred.", "visual_elements": [ { "content": "The camera continues to move to the left.", "type": "camera", "weight": 3 }, { "content": "The flowers gradually leave the frame.", "type": "attribute", "weight": 2 }, { "content": "The image becomes blurred.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 6 }, { "index": "TUNA_0869", "video_path": "MiraData/video_clips/000005047/000005047101.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 20.08, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a close-up of a rough piece of tree bark, which is covered with moss and cracks and is mainly brown and green.\nThen, the camera zooms out and continues to move to the lower right, with a snail shell gradually entering the center of the picture. The snail shell is spiral-shaped with brown stripes on the surface.\nThe camera continues to move to the lower right, and the snail's entire body gradually enters the frame. The snail's black tentacles gently sway. 
A green leaf appears in the frame, and as the camera moves, the leaf gradually occupies the lower half of the frame.", "events": [ { "event": "The video begins with a close-up of a rough piece of tree bark, which is covered with moss and cracks and is mainly brown and green.", "visual_elements": [ { "content": "At the beginning of the video, the camera zooms in on a rough piece of bark.", "type": "camera", "weight": 3 }, { "content": "The surface of the bark is covered with moss and cracks, mainly brown and green.", "type": "attribute", "weight": 3 } ] }, { "event": "Then, the camera zooms out and continues to move to the lower right, with a snail shell gradually entering the center of the picture. The snail shell is spiral-shaped with brown stripes on the surface.", "visual_elements": [ { "content": "Then, the camera zooms out and continues to move to the lower right.", "type": "camera", "weight": 3 }, { "content": "The shell of a snail gradually enters the center of the picture.", "type": "attribute", "weight": 3 }, { "content": "The snail's shell is spiral-shaped and has brown stripes on the surface.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move to the lower right, and the snail's entire body gradually enters the frame. The snail's black tentacles gently sway. A green leaf appears in the frame, and as the camera moves, the leaf gradually occupies the lower half of the frame.", "visual_elements": [ { "content": "The camera continues to move to the lower right, and the whole body of the snail gradually enters the picture.", "type": "camera", "weight": 3 }, { "content": "The snail's black tentacles gently sway.", "type": "action", "weight": 1 }, { "content": "A green leaf appears in the picture.", "type": "attribute", "weight": 2 }, { "content": "As the camera moves, the leaf gradually occupies the lower half of the picture.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 9 }, { "index": "TUNA_0870", "video_path": "MiraData/video_clips/000005047/000005047108.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 23.64, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a rough gray rock surface, and the picture is blurry. As the camera gradually focuses, the texture of the rock becomes clear. There are some green moss and wet brown soil on the rock surface.\nThe camera slowly moves to the lower left, continuing to show the surface of the rock. A snail appears in the picture, with its shell showing brown and yellow stripes. The snail lies on the rock surface, its gray and white tentacles swaying slightly.\nFinally, the camera is kept steady to get a close-up shot of the snail, showing the texture and sheen of its shell. The snail is facing up to a higher rock structure on the left side of the frame. The rock is relatively wet, and the green moss on it is very visible.", "events": [ { "event": "At the beginning of the video, the camera is focused on a rough gray rock surface, and the picture is blurry. As the camera gradually focuses, the texture of the rock becomes clear. 
There are some green moss and wet brown soil on the rock surface.", "visual_elements": [ { "content": "The camera is focused on a rough grey rock surface.", "type": "camera", "weight": 3 }, { "content": "The image is blurry.", "type": "camera", "weight": 2 }, { "content": "As the camera gradually focuses, the texture of the rock becomes clear.", "type": "camera", "weight": 3 }, { "content": "There is some green moss and wet brown dirt on the rock surface.", "type": "scene", "weight": 2 } ] }, { "event": "The camera slowly moves to the lower left, continuing to show the surface of the rock. A snail appears in the picture, with its shell showing brown and yellow stripes. The snail lies on the rock surface, its gray and white tentacles swaying slightly.", "visual_elements": [ { "content": "The camera slowly moves to the lower left, continuing to show the surface of the rock.", "type": "camera", "weight": 3 }, { "content": "A snail appears in the frame.", "type": "attribute", "weight": 3 }, { "content": "The snail's shell is striped with brown and yellow.", "type": "attribute", "weight": 2 }, { "content": "The snail lies on the rock surface.", "type": "action", "weight": 3 }, { "content": "The snail's grey and white tentacles sway slightly.", "type": "action", "weight": 1 } ] }, { "event": "Finally, the camera is kept steady to get a close-up shot of the snail, showing the texture and sheen of its shell. The snail is facing up to a higher rock structure on the left side of the frame. The rock is relatively wet, and the green moss on it is very visible.", "visual_elements": [ { "content": "Finally, the camera remains steady.", "type": "camera", "weight": 3 }, { "content": "The camera takes a close-up shot of the snail, showing the texture and sheen of the snail's shell.", "type": "attribute", "weight": 2 }, { "content": "The snail is facing the higher rock structure on the left side of the frame.", "type": "attribute", "weight": 2 }, { "content": "The rock is relatively wet, and the green moss on it is very obvious.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 13 }, { "index": "TUNA_0871", "video_path": "MiraData/video_clips/000005047/000005047109.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 21.04, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a bunch of white flowers with a black background.\nThe camera moves to the left to show the full bouquet of flowers, pink flowers enter the frame and the background becomes blurred with white flowers in the background.", "events": [ { "event": "The video begins with the camera focusing on a bunch of white flowers with a black background.", "visual_elements": [ { "content": "The camera focusing on a bunch of white flower.", "type": "camera", "weight": 3 }, { "content": "The background is black.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves to the left to show the full bouquet of flowers, pink flowers enter the frame and the background becomes blurred with white flowers in the background.", "visual_elements": [ { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "The image shows a full view of the bouquet.", "type": "attribute", "weight": 2 }, { "content": "Pink flowers enter the frame.", "type": "attribute", "weight": 2 }, { "content": "The background becomes blurred.", "type": "scene", "weight": 2 }, { "content": "White flowers in the background.", 
"type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 7 }, { "index": "TUNA_0872", "video_path": "MiraData/video_clips/000005047/000005047115.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 21.96, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera pausing on a blurry background with some fuzzy flowers in the background.\nThe camera continues to move forward and the white flowers in the frame become clearer. The camera focuses on a white flower and the blurring of the flower bush becomes visible.", "events": [ { "event": "The video begins with the camera pausing on a blurry background with some fuzzy flowers in the background.", "visual_elements": [ { "content": "The camera pausing on a blurry background.", "type": "camera", "weight": 3 }, { "content": "Some fuzzy flowers in the blurry background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move forward and the white flowers in the frame become clearer. The camera focuses on a white flower and the blurring of the flower bush becomes visible.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The white flowers in the frame become clearer.", "type": "attribute", "weight": 2 }, { "content": "The camera focuses on a white flower", "type": "camera", "weight": 3 }, { "content": "The blurring of the flower bush becomes visible.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 6 }, { "index": "TUNA_0873", "video_path": "MiraData/video_clips/000005047/000005047150.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 23.16, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a tall tree in the center of the frame, which is surrounded by a large green meadow. Some houses and buildings are in the distance, and in the background are rolling hills covered with green vegetation.\nThe camera moves slowly to the right around the tree and pans from the side of the tree to the front of the tree. The camera pans to the left and some black cows can be seen grazing leisurely in the meadow, the cows are scattered in different locations in the meadow.", "events": [ { "event": "The video begins with a tall tree in the center of the frame, which is surrounded by a large green meadow. Some houses and buildings are in the distance, and in the background are rolling hills covered with green vegetation.", "visual_elements": [ { "content": "In the center of the frame is a tree.", "type": "scene", "weight": 3 }, { "content": "The tree is surrounded by a large green meadow.", "type": "attribute", "weight": 2 }, { "content": "Some houses and buildings are in the distance.", "type": "scene", "weight": 2 }, { "content": "The background is rolling hills.", "type": "attribute", "weight": 2 }, { "content": "Hills are covered with green vegetation.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera moves slowly to the right around the tree and pans from the side of the tree to the front of the tree. 
The camera pans to the left and some black cows can be seen grazing leisurely in the meadow, the cows are scattered in different locations in the meadow.", "visual_elements": [ { "content": "The camera moves slowly to the right around the tree and pans from the side of the tree to the front of the tree.", "type": "camera", "weight": 3 }, { "content": "Trees are shifted left in the frame.", "type": "scene", "weight": 3 }, { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "Some black cows are grazing leisurely in the meadow.", "type": "scene", "weight": 2 }, { "content": "The cows are scattered in different locations in the meadow.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0874", "video_path": "MiraData/video_clips/000005047/000005047207.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 22.04, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera looks down on a meandering river from a high altitude, and on both sides of the river are vast green meadows and hills. In the middle of the river there are some shoals and small islands. The camera moves slowly forward, gradually drawing closer to the river and the surrounding landscape.\nThe camera moves upwards and beside the river there is a winding path which follows the course of the river. Two cars can be seen traveling on the path. The hills in the distance gradually become clearer and they are covered with dense green vegetation.\nThe camera continues to move forward and the moving vehicles disappear into the frame. Mountains come into view in the distance. The tops of the mountains are shrouded in clouds. The camera slowly rises and the entire valley can be seen in full view.\nThe camera suddenly rotates and new hills, covered with vegetation, appear in the frame.", "events": [ { "event": "At the beginning of the video, the camera looks down on a meandering river from a high altitude, and on both sides of the river are vast green meadows and hills. In the middle of the river there are some shoals and small islands. The camera moves slowly forward, gradually drawing closer to the river and the surrounding landscape.", "visual_elements": [ { "content": "The camera looks down on a meandering river from a high altitude", "type": "camera", "weight": 3 }, { "content": "On both sides of the river are vast green meadows and hills.", "type": "attribute", "weight": 2 }, { "content": "In the middle of the river there are some shoals and small islands.", "type": "attribute", "weight": 2 }, { "content": "The camera moves slowly forward.", "type": "camera", "weight": 3 }, { "content": "The camera gradually draws closer to the river and the surrounding landscape.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves upwards and beside the river there is a winding path which follows the course of the river. Two cars can be seen traveling on the path. 
The hills in the distance gradually become clearer and they are covered with dense green vegetation.", "visual_elements": [ { "content": "The camera moves upwards.", "type": "camera", "weight": 3 }, { "content": "Beside the river there is a winding path.", "type": "scene", "weight": 3 }, { "content": "The path follows the course of the river.", "type": "attribute", "weight": 2 }, { "content": "Two cars are traveling on the path.", "type": "attribute", "weight": 2 }, { "content": "The hills in the distance gradually become clearer.", "type": "scene", "weight": 2 }, { "content": "The hills are covered with dense green vegetation.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward and the moving vehicles disappear into the frame. Mountains come into view in the distance. The tops of the mountains are shrouded in clouds. The camera slowly rises and the entire valley can be seen in full view.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The moving vehicles disappear into the frame.", "type": "scene", "weight": 2 }, { "content": "Mountains come into view in the distance.", "type": "scene", "weight": 2 }, { "content": "The tops of the mountains are shrouded in clouds.", "type": "attribute", "weight": 2 }, { "content": "The camera slowly rises.", "type": "camera", "weight": 3 }, { "content": "The entire valley can be seen in full view.", "type": "scene", "weight": 1 } ] }, { "event": "The camera suddenly rotates and new hills, covered with vegetation, appear in the frame.", "visual_elements": [ { "content": "The camera suddenly rotates to reveal a new hill.", "type": "camera", "weight": 3 }, { "content": "Hills are covered with vegetation.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0875", "video_path": "MiraData/video_clips/000005047/000005047281.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 25.56, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera looking down on a vast green field from a high vantage point, with rolling hills and a cloudy sky in the distance, and a hill covered in greenery in the near distance, with a winding path on the hill and some vehicles on the path.\nThe camera moves to the right to show the other side of the hill, with more hills and fields appearing in the distance.", "events": [ { "event": "The video begins with the camera looking down on a vast green field from a high vantage point, with rolling hills and a cloudy sky in the distance, and a hill covered in greenery in the near distance, with a winding path on the hill and some vehicles on the path.", "visual_elements": [ { "content": "The camera looks down on a vast green field from above.", "type": "camera", "weight": 3 }, { "content": "In the distance are rolling hills and a cloudy sky.", "type": "scene", "weight": 2 }, { "content": "Nearby is a hill covered in greenery.", "type": "scene", "weight": 2 }, { "content": "On the hill is a winding path.", "type": "attribute", "weight": 2 }, { "content": "There are some vehicles on the trail.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera moves to the right to show the other side of the hill, with more hills and fields appearing in the distance.", "visual_elements": [ { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The camera 
shows the other side of the hill", "type": "scene", "weight": 2 }, { "content": "More hills and fields appear in the distance.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0876", "video_path": "MiraData/video_clips/000005047/000005047480.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 22.56, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a close-up of a snail with yellow and black stripes on its shell. The snail's gray and white tentacles extend out and slowly move across a rough rock surface. The background is a rough rock with some small bumps and textures on the surface, and the color is a mixture of light brown and gray.\nThen, the camera starts to shake. The snail slowly crawls from the upper right to the lower left of the picture, leaving a wet trace. The snail's tentacles are slightly swaying, and its body is close to the rock. Transparent bubbles are coming out between the snail's shell and head.", "events": [ { "event": "The video begins with a close-up of a snail with yellow and black stripes on its shell. The snail's gray and white tentacles extend out and slowly move across a rough rock surface. The background is a rough rock with some small bumps and textures on the surface, and the color is a mixture of light brown and gray.", "visual_elements": [ { "content": "The camera is close-up on a snail.", "type": "camera", "weight": 3 }, { "content": "The snail's shell is striped with yellow and black.", "type": "attribute", "weight": 3 }, { "content": "The snail's gray and white tentacles are stretched out, slowly moving on a rough rock surface.", "type": "attribute", "weight": 3 }, { "content": "The background is a rough rock with some small bumps and textures on the rock surface.", "type": "scene", "weight": 3 }, { "content": "The rock is a mixture of light brown and gray.", "type": "scene", "weight": 2 } ] }, { "event": "Then, the camera starts to shake. The snail slowly crawls from the upper right to the lower left of the picture, leaving a wet trace. The snail's tentacles are slightly swaying, and its body is close to the rock. Transparent bubbles are coming out between the snail's shell and head.", "visual_elements": [ { "content": "Then, the camera starts to shake.", "type": "camera", "weight": 3 }, { "content": "The snail slowly crawls from the upper right to the lower left of the picture, leaving a wet trace.", "type": "action", "weight": 3 }, { "content": "The snail's tentacles are slightly swaying, and its body is close to the rock.", "type": "attribute", "weight": 2 }, { "content": "Transparent bubbles are coming out between the snail's shell and head.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0877", "video_path": "MiraData/video_clips/000005047/000005047553.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 25.24, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the image of a tranquil sea with several towering limestone peaks in the distance, covered in dense green vegetation. Two fishing boats moored side by side appear in the center of the frame.\nThe camera rotates from the side of the boat to the left to the front of the boat, a full view of the entire fleet is gradually revealed. The four fishing boats are close together, anchored in the tranquil sea. 
Some smaller boats can be seen on the sea in the distance.", "events": [ { "event": "The video begins with the image of a tranquil sea with several towering limestone peaks in the distance, covered in dense green vegetation. Two fishing boats moored side by side appear in the center of the frame.", "visual_elements": [ { "content": "The frame shows a tranquil sea.", "type": "scene", "weight": 3 }, { "content": "In the distance are several towering limestone peaks covered in lush green vegetation.", "type": "scene", "weight": 3 }, { "content": "Two fishing boats moored side by side appear in the center of the frame.", "type": "scene", "weight": 3 } ] }, { "event": "The camera rotates from the side of the boat to the left to the front of the boat, a full view of the entire fleet is gradually revealed. The four fishing boats are close together, anchored in the tranquil sea. Some smaller boats can be seen on the sea in the distance.", "visual_elements": [ { "content": "The camera rotates from the side of the boat to the left to the front of the boat.", "type": "camera", "weight": 3 }, { "content": "A full view of the entire fleet is shown.", "type": "scene", "weight": 2 }, { "content": "The four fishing boats are close together.", "type": "attribute", "weight": 2 }, { "content": "Some smaller boats are on the sea in the distance.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 7 }, { "index": "TUNA_0878", "video_path": "MiraData/video_clips/000005047/000005047559.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 22.48, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is pointed at a body of water with a group of swans on the water. The swans have white feathers, slender necks and orange beaks. In addition to the swans, there are also several small black and white Canadian geese with brown wings. The swans swim leisurely on the water, which is sparkling and rippling. The water in front of the screen reflects the sun.\nThe camera is aimed at the side of the swans and moves slowly to the left. Some of the swans gathered together lower their heads to look for food in the water, while others look up and look around. The smallest, innermost swan disappears from the picture. The Canadian geese swim on the periphery of the swan group. In the background, the water is wide.", "events": [ { "event": "At the beginning of the video, the camera is pointed at a body of water with a group of swans on the water. The swans have white feathers, slender necks and orange beaks. In addition to the swans, there are also several small black and white Canadian geese with brown wings. The swans swim leisurely on the water, which is sparkling and rippling. 
The water in front of the screen reflects the sun.", "visual_elements": [ { "content": "The camera is pointed at a body of water.", "type": "camera", "weight": 3 }, { "content": "There is a group of swans on the water.", "type": "attribute", "weight": 3 }, { "content": "The swans have white feathers, long necks and orange beaks.", "type": "attribute", "weight": 3 }, { "content": "In addition to the swans, there are also several black and white Canada geese.", "type": "attribute", "weight": 2 }, { "content": "Canada geese are small in size and have brown wings.", "type": "attribute", "weight": 1 }, { "content": "Swans swim leisurely on the water.", "type": "action", "weight": 3 }, { "content": "The water is sparkling with subtle ripples.", "type": "scene", "weight": 2 }, { "content": "The sun is reflected on the water in front of the picture.", "type": "scene", "weight": 1 } ] }, { "event": "The camera is aimed at the side of the swans and moves slowly to the left. Some of the swans gathered together lower their heads to look for food in the water, while others look up and look around. The smallest, innermost swan disappears from the picture. The Canadian geese swim on the periphery of the swan group. In the background, the water is wide.", "visual_elements": [ { "content": "The camera is aimed at the side of the swans and moves slowly to the left.", "type": "camera", "weight": 3 }, { "content": "Some of the swans gathered together lower their heads to look for food in the water, while others look up and look around.", "type": "action", "weight": 2 }, { "content": "The smallest, innermost swan disappears from the picture.", "type": "action", "weight": 1 }, { "content": "The Canadian geese swim on the periphery of the swan group.", "type": "action", "weight": 2 }, { "content": "In the background, the water is wide.", "type": "scene", "weight": 3 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0879", "video_path": "MiraData/video_clips/000005047/000005047771.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 22.19, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera resting on an open grassy field surrounded by trees, with rolling snow-capped mountains in the distance.\nAs the camera closes in, the peaks are covered in thick snow and the rocky textures of the mountains are clearly visible. More peaks and trees are shown, with the mountains varying in height and shape, some sharp and others more gentle. The camera eventually fixes on a gentle depression at the foot of a snow-capped mountain.\nBelow the frame are dark woods and steep mountain peaks.", "events": [ { "event": "The video begins with the camera resting on an open grassy field surrounded by trees, with rolling snow-capped mountains in the distance.", "visual_elements": [ { "content": "The camera lingers on a patch of grass.", "type": "camera", "weight": 3 }, { "content": "The meadow is surrounded by trees.", "type": "scene", "weight": 2 }, { "content": "In the distance are rolling snow-capped mountains.", "type": "scene", "weight": 3 } ] }, { "event": "As the camera closes in, the peaks are covered in thick snow and the rocky textures of the mountains are clearly visible. 
More peaks and trees are shown, with the mountains varying in height and shape, some sharp and others more gentle. The camera eventually fixes on a gentle depression at the foot of a snow-capped mountain.\nBelow the frame are dark woods and steep mountain peaks.", "visual_elements": [ { "content": "The camera closes in on the snow-covered mountains.", "type": "camera", "weight": 3 }, { "content": "The peaks are covered in thick snow.", "type": "attribute", "weight": 2 }, { "content": "The rock texture of the mountains is also clearly visible, with more peaks and trees appearing.", "type": "attribute", "weight": 2 }, { "content": "Mountains vary in height and shape.", "type": "attribute", "weight": 2 }, { "content": "The mountains have sharp peaks.", "type": "attribute", "weight": 1 }, { "content": "Some of the mountains are gentle.", "type": "attribute", "weight": 1 }, { "content": "The camera eventually fixes on a gentle depression at the foot of a snow-capped mountain.", "type": "scene", "weight": 3 }, { "content": "Below the frame are dark woods and steep mountain peaks.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0880", "video_path": "MiraData/video_clips/000005048/000005048004.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 26.24, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera looking down from a high altitude, showing a lush green forest. The trees are dense and neatly arranged, creating a geometric pattern. The camera rotates counterclockwise to show more trees and grass. The trees cast long shadows on the ground. Trees vary in type and height.", "events": [ { "event": "The video begins with the camera looking down from a high altitude, showing a lush green forest. The trees are dense and neatly arranged, creating a geometric pattern. The camera rotates counterclockwise to show more trees and grass. The trees cast long shadows on the ground. Trees vary in type and height.", "visual_elements": [ { "content": "The camera looks down from a high altitude, showing a lush green forest and steep mountains.", "type": "camera", "weight": 3 }, { "content": "The trees are dense and neatly arranged, creating a geometric pattern.", "type": "attribute", "weight": 2 }, { "content": "The camera rotates counterclockwise to show more trees and grass.", "type": "camera", "weight": 3 }, { "content": "The trees cast long shadows on the ground.", "type": "attribute", "weight": 1 }, { "content": "Trees vary in type and height.", "type": "attribute", "weight": 2 }, { "content": "Some of the trees are full of flowers in bright colors.", "type": "attribute", "weight": 2 } ] } ], "n_events": 1, "n_elements": 6 }, { "index": "TUNA_0881", "video_path": "MiraData/video_clips/000005048/000005048129.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 20.0, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a green grassy field with a few dewdrops glistening on the grass. The camera moves upward, gradually revealing more of the grass. As the camera continues to move upwards, the edges of the grass begin to appear, and distant objects gradually come into view.\nAs the camera continues to move upwards, some rocks appear at the end of the meadow. 
As the camera moves further upward, the distant hills and trees become clear.\nA full view of the entire village is gradually revealed, surrounded by green fields and trees. The camera eventually stops at this high point.", "events": [ { "event": "The video begins with a green grassy field with a few dewdrops glistening on the grass. The camera moves upward, gradually revealing more of the grass. As the camera continues to move upwards, the edges of the grass begin to appear, and distant objects gradually come into view.", "visual_elements": [ { "content": "A green grassy field with a few dewdrops glistening on the grass.", "type": "scene", "weight": 3 }, { "content": "The camera moves upward gradually revealing more of the grass.", "type": "camera", "weight": 3 }, { "content": "The camera continues to move upward.", "type": "camera", "weight": 3 }, { "content": "The edges of the grass begin to appear.", "type": "scene", "weight": 2 }, { "content": "Distant objects gradually come into view.", "type": "scene", "weight": 2 } ] }, { "event": "As the camera continues to move upwards, some rocks appear at the end of the meadow. As the camera moves further upward, the distant hills and trees become clear.", "visual_elements": [ { "content": "The camera continues to move upwards.", "type": "camera", "weight": 3 }, { "content": "Some rocks appear at the end of the meadow.", "type": "scene", "weight": 3 }, { "content": "The distant hills and trees become clear.", "type": "scene", "weight": 2 } ] }, { "event": "A full view of the entire village is gradually revealed, surrounded by green fields and trees. The camera eventually stops at this high point.", "visual_elements": [ { "content": "A full view of the entire village is gradually revealed.", "type": "scene", "weight": 2 }, { "content": "The village is surrounded by green fields and trees.", "type": "attribute", "weight": 2 }, { "content": "The camera eventually stops at this high point.", "type": "camera", "weight": 2 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0882", "video_path": "MiraData/video_clips/000005048/000005048235.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 19.52, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera shooting from the bottom up pausing on a leafy tree branch with golden colored leaves. In the background is a blue sky with a few white clouds in the sky.\nAs the camera moves forward, the range of leaves in the frame gets smaller and smaller, the details of these leaves become clearer, the texture and color of the leaves change in the frame, and the blue sky and white clouds in the background are clearly visible.", "events": [ { "event": "The video begins with the camera shooting from the bottom up pausing on a leafy tree branch with golden colored leaves. 
In the background is a blue sky with a few white clouds in the sky.", "visual_elements": [ { "content": "The camera stops at a branch of a leafy tree from below.", "type": "camera", "weight": 3 }, { "content": "The leaves are a golden color.", "type": "scene", "weight": 2 }, { "content": "In the background is a blue sky.", "type": "scene", "weight": 2 }, { "content": "There are a few white clouds in the sky.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera moves forward, the range of leaves in the frame gets smaller and smaller, the details of these leaves become clearer, the texture and color of the leaves change in the frame, and the blue sky and white clouds in the background are clearly visible.", "visual_elements": [ { "content": "The camera moves forward.", "type": "camera", "weight": 3 }, { "content": "The range of leaves in the frame becomes smaller and smaller.", "type": "scene", "weight": 3 }, { "content": "The details of these leaves become clearer.", "type": "scene", "weight": 3 }, { "content": "The texture and color of the leaves change in the frame.", "type": "attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0883", "video_path": "MiraData/video_clips/000005048/000005048248.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 20.96, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on an open farmland with neatly plowed fields in wavy lines. There are several sheep grazing in the field. In the middle of the frame, there is a sheep facing the right side of the camera, and there are two more behind it. The sheep have yellow and white fur. In the foreground of the camera, there are several tall yellow plant stems on the yellow and green weeds, with slender inflorescences on top. In the background, the sky is blue, and there are neat rows of bare trees on the left side of the frame.\nThe camera moves to the right, passing through a column full of yellow matter, with tall plant stems still in the frame, and the wind gently blowing the plant inflorescences. More sheep appear in the frame, and the sheep continue to graze in the fields.\nThen, the camera continues to move slowly to the right, and the sky in the background gradually becomes larger.\nIn the second half of the video, the camera stabilizes. The two sheep closest to the camera lower their heads and move forward a few times to continue eating grass. Throughout the process, the camera always maintains a long-range shot.", "events": [ { "event": "The video begins with the camera focused on an open farmland with neatly plowed fields in wavy lines. There are several sheep grazing in the field. In the middle of the frame, there is a sheep facing the right side of the camera, and there are two more behind it. The sheep have yellow and white fur. In the foreground of the camera, there are several tall yellow plant stems on the yellow and green weeds, with slender inflorescences on top. 
In the background, the sky is blue, and there are neat rows of bare trees on the left side of the frame.", "visual_elements": [ { "content": "The camera is pointed at an open farmland.", "type": "camera", "weight": 3 }, { "content": "There are neat traces of tillage on the ground, showing wavy lines.", "type": "attribute", "weight": 3 }, { "content": "There are several sheep grazing in the farmland.", "type": "action", "weight": 3 }, { "content": "There is a sheep in the middle of the picture facing the right of the camera, and there are two more behind it.", "type": "attribute", "weight": 2 }, { "content": "The sheep's fur is yellow and white.", "type": "attribute", "weight": 2 }, { "content": "There are several tall yellow plant stems on the yellow and green weeds in the foreground of the camera.", "type": "attribute", "weight": 3 }, { "content": "The top of the plant stems is a slender inflorescence.", "type": "attribute", "weight": 2 }, { "content": "In the background, the sky is blue.", "type": "scene", "weight": 3 }, { "content": "On the left side of the picture are neatly arranged bare trees.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves to the right, passing through a column full of yellow matter, with tall plant stems still in the frame, and the wind gently blowing the plant inflorescences. More sheep appear in the frame, and the sheep continue to graze in the fields.", "visual_elements": [ { "content": "The camera moves to the right, passing through a column full of yellow material.", "type": "camera", "weight": 3 }, { "content": "The plant stems are still in the picture.", "type": "attribute", "weight": 2 }, { "content": "The wind gently blows the plant inflorescence.", "type": "attribute", "weight": 1 }, { "content": "More sheep appear in the picture.", "type": "attribute", "weight": 2 }, { "content": "The sheep continue to graze in the field with their heads down.", "type": "action", "weight": 3 } ] }, { "event": "Then, the camera continues to move slowly to the right, and the sky in the background gradually becomes larger.", "visual_elements": [ { "content": "Then, the camera continues to move slowly to the right.", "type": "camera", "weight": 3 }, { "content": "The range of the sky in the background gradually becomes larger.", "type": "scene", "weight": 2 } ] }, { "event": "In the second half of the video, the camera stabilizes. The two sheep closest to the camera lower their heads and move forward a few times to continue eating grass. Throughout the process, the camera always maintains a long-range shot.", "visual_elements": [ { "content": "In the second half of the video, the camera stabilizes.", "type": "camera", "weight": 3 }, { "content": "The two sheep closest to the camera lower their heads and move forward a few times to continue grazing.", "type": "action", "weight": 2 }, { "content": "Throughout the process, the camera always maintains a long-range shot", "type": "camera", "weight": 3 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0884", "video_path": "MiraData/video_clips/000005048/000005048294.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 29.76, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a narrow dirt path lined with tall trees whose trunks are covered with moss and vines. The path is paved with gravel and is somewhat rugged. 
The camera moves forward in the direction of the path, the shadows of the trees casting dappled light on the ground.\nAs the camera advances, the surrounding scenery gradually unfolds. The trees on either side of the path are leafy, forming a natural arching tunnel. Grasses and wildflowers peeking out of the soil along the path.", "events": [ { "event": "The video begins with the camera focusing on a narrow dirt path lined with tall trees whose trunks are covered with moss and vines. The path is paved with gravel and is somewhat rugged. The camera moves forward in the direction of the path, the shadows of the trees casting dappled light on the ground.", "visual_elements": [ { "content": "The camera focusing on a narrow dirt path lined with tall trees.", "type": "scene", "weight": 3 }, { "content": "Trunks are covered with moss and vines.", "type": "attribute", "weight": 2 }, { "content": "The path is paved with gravel and is somewhat rugged.", "type": "attribute", "weight": 2 }, { "content": "The camera moves forward in the direction of the path.", "type": "camera", "weight": 3 }, { "content": "The shadows of the trees casting dappled light on the ground.", "type": "scene", "weight": 1 } ] }, { "event": "As the camera advances, the surrounding scenery gradually unfolds. The trees on either side of the path are leafy, forming a natural arching tunnel. Grasses and wildflowers peeking out of the soil along the path.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The trees on either side of the path are leafy, forming a natural arching tunnel.", "type": "attribute", "weight": 2 }, { "content": "Grasses and wildflowers peeking out of the soil along the path.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0885", "video_path": "MiraData/video_clips/000005048/000005048335.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 30.0, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera looking down from a high altitude on a vast green field. There is a distinct track in the middle of the field. The camera moves forward to show more of the field, and the track in the field becomes more visible; the track is made up of multiple parallel lines. These lines form a regular pattern in the field.\nThe camera continues to move upward as the device gradually becomes smaller and eventually disappears from view.", "events": [ { "event": "The video begins with the camera looking down from a high altitude on a vast green field. There is a distinct track in the middle of the field. The camera moves forward to show more of the field, and the track in the field becomes more visible; the track is made up of multiple parallel lines. 
These lines form a regular pattern in the field.", "visual_elements": [ { "content": "The camera looking down from a high altitude on a vast green field.", "type": "camera", "weight": 3 }, { "content": "There is a distinct track in the middle of the field.", "type": "attribute", "weight": 3 }, { "content": "The camera moves forward to show more of the field, and the track in the field becomes more visible.", "type": "camera", "weight": 3 }, { "content": "The camera moves forward.", "type": "camera", "weight": 3 }, { "content": "Tracks in the field are more visible.", "type": "scene", "weight": 2 }, { "content": "The track is made up of multiple parallel lines.", "type": "attribute", "weight": 2 }, { "content": "These lines form a regular pattern in the field.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move upward as the device gradually becomes smaller and eventually disappears from view.", "visual_elements": [ { "content": "The camera continues to move upward.", "type": "camera", "weight": 3 }, { "content": "The device gradually becomes smaller.", "type": "scene", "weight": 2 }, { "content": "The device eventually disappears from view.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0886", "video_path": "MiraData/video_clips/000005048/000005048499.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 27.2, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera looking down from a high altitude, showing a straight country road lined with large green farmland. There are several cars driving along the highway, the vehicles varying in color and model. The camera slowly moves forward to close the view and the details of the farmland on both sides of the highway become clearer.\nThe farmland on both sides of the highway is neatly divided into different areas, with the farmland on the left showing a light green color, while the farmland on the right is a darker green. There are clear signs of mechanical plowing in the farmland, creating parallel lines. The vegetation on both sides of the highway is dense and green, and some small bushes can be seen.", "events": [ { "event": "The video begins with the camera looking down from a high altitude, showing a straight country road lined with large green farmland. There are several cars driving along the highway, the vehicles varying in color and model. The camera slowly moves forward to close the view and the details of the farmland on both sides of the highway become clearer.", "visual_elements": [ { "content": "the camera looking down from a high altitude.", "type": "camera", "weight": 3 }, { "content": "There are several cars driving along the highway.", "type": "scene", "weight": 3 }, { "content": "The road is lined with large green farmland.", "type": "attribute", "weight": 2 }, { "content": "There are several cars driving along the highway.", "type": "attribute", "weight": 2 }, { "content": "Vehicles vary in color and model.", "type": "attribute", "weight": 1 } ] }, { "event": "The farmland on both sides of the highway is neatly divided into different areas, with the farmland on the left showing a light green color, while the farmland on the right is a darker green. There are clear signs of mechanical plowing in the farmland, creating parallel lines. 
The vegetation on both sides of the highway is dense and green, and some small bushes can be seen.", "visual_elements": [ { "content": "The camera pushes forward and the farmland on both sides of the highway is neatly divided into different areas.", "type": "camera", "weight": 3 }, { "content": "The farmland on the left showing a light green color.", "type": "attribute", "weight": 2 }, { "content": "The farmland on the right is a darker green.", "type": "attribute", "weight": 2 }, { "content": "There are clear signs of mechanical plowing in the farmland, creating parallel lines.", "type": "attribute", "weight": 2 }, { "content": "The vegetation on both sides of the highway is dense and green.", "type": "scene", "weight": 3 }, { "content": "There are some small bushes on the side of the road.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0887", "video_path": "MiraData/video_clips/000005048/000005048676.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 26.88, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a country road with gravel and dirt covering the ground. On either side are tall, dead grass and bushes. In the center of the frame is a woman, walking slowly forward with her back to the camera. She is about medium build and is wearing a black jacket with shoulder length hair that is brown in color. Her hands are in the pockets of her jacket and her pace is steady.\nThe camera follows the woman as she walks forward and the road turns muddy with puddles. She carefully avoids these puddles. As the camera continues to follow, the scenery around her gradually changes, with the dead grass on the left becoming denser and the bushes on the right appearing sparser. The outlines of some trees can be seen in the distance.\nAt the end of the video, the woman continues to walk along the path, with the camera staying behind her at all times, moving slowly and steadily.", "events": [ { "event": "The video begins with the camera focusing on a country road with gravel and dirt covering the ground. On either side are tall, dead grass and bushes. In the center of the frame is a woman, walking slowly forward with her back to the camera. She is about medium build and is wearing a black jacket with shoulder length hair that is brown in color. 
Her hands are in the pockets of her jacket and her pace is steady.", "visual_elements": [ { "content": "The camera focuses on a country road with gravel and dirt covering the ground.", "type": "scene", "weight": 3 }, { "content": "The ground is covered with gravel and dirt.", "type": "attribute", "weight": 2 }, { "content": "On either side are tall, dead grass and bushes.", "type": "attribute", "weight": 2 }, { "content": "In the center of the frame is a woman.", "type": "action", "weight": 3 }, { "content": "The woman walks slowly forward with her back to the camera.", "type": "action", "weight": 3 }, { "content": "The woman is of approximately medium build.", "type": "attribute", "weight": 2 }, { "content": "The woman is wearing a black jacket.", "type": "attribute", "weight": 2 }, { "content": "Her hair is shoulder-length and brown in color.", "type": "attribute", "weight": 2 }, { "content": "The woman has her hands in the pockets of her jacket.", "type": "action", "weight": 2 }, { "content": "The woman walks at a steady pace.", "type": "action", "weight": 1 } ] }, { "event": "The camera follows the woman as she walks forward and the road turns muddy with puddles. She carefully avoids these puddles. As the camera continues to follow, the scenery around her gradually changes, with the dead grass on the left becoming denser and the bushes on the right appearing sparser. The outlines of some trees can be seen in the distance.", "visual_elements": [ { "content": "The camera follows the woman as she walks forward.", "type": "camera", "weight": 3 }, { "content": "The road is muddy with puddles.", "type": "scene", "weight": 2 }, { "content": "The woman carefully avoids these puddles.", "type": "action", "weight": 2 } ] }, { "event": "At the end of the video, the woman continues to walk along the path, with the camera staying behind her at all times, moving slowly and steadily.", "visual_elements": [ { "content": "The woman continues to walk along the path.", "type": "action", "weight": 3 }, { "content": "The camera stays behind her at all times, moving slowly and steadily.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0888", "video_path": "MiraData/video_clips/000005048/000005048834.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 22.24, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on the trunk of a tree, which is covered with some green vine plants. The camera moves upward, gradually leaving the base of the trunk, the texture of which is still clearly visible.\nThe camera continues to move upwards and the top of the trunk gradually comes into view. The trunk becomes more elongated and the crown part of the tree enters the frame. The branches of the tree intertwine to form a complex and tight network.", "events": [ { "event": "The video begins with the camera focusing on the trunk of a tree, which is covered with some green vine plants. 
The camera moves upward, gradually leaving the base of the trunk, the texture of which is still clearly visible.", "visual_elements": [ { "content": "The camera focuses on the trunk of a tree.", "type": "camera", "weight": 3 }, { "content": "The trunk is covered with some green vine plants.", "type": "attribute", "weight": 2 }, { "content": "The camera moves upward.", "type": "camera", "weight": 3 }, { "content": "The camera gradually moves away from the base of the trunk.", "type": "camera", "weight": 2 }, { "content": "The texture of the trunk is still clearly visible.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move upwards and the top of the trunk gradually comes into view. The trunk becomes more elongated and the crown part of the tree enters the frame. The branches of the tree intertwine to form a complex and tight network.", "visual_elements": [ { "content": "The camera continues to move upwards.", "type": "camera", "weight": 3 }, { "content": "The top of the trunk gradually comes into view.", "type": "attribute", "weight": 3 }, { "content": "The trunk becomes more elongated.", "type": "attribute", "weight": 2 }, { "content": "The crown part of the tree enters the frame.", "type": "attribute", "weight": 3 }, { "content": "The branches of the tree intertwine to form a complex and tight network.", "type": "attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0889", "video_path": "MiraData/video_clips/000005049/000005049037.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 38.21, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera looking down on a vast field from a high altitude; the field appears to be neatly striped and golden in color. The camera moves forward to show a full view of the field. The left edge of the field has a green strip of shrubs, and next to the strip of shrubs is another green field of farmland, which is divided into several neat squares.\nAs the camera moves, two square haystacks appear in the upper right corner of the field. The camera continues to move forward and more haystacks gradually come into view.\nAs the camera continues to move to the right, a cluster of buildings appears at the right edge of the field, surrounded by green shrubs. At the end of the video, the camera moves slowly upwards to show the entire farm and surrounding fields, with the view gradually becoming clearer in the distance.", "events": [ { "event": "The video begins with the camera looking down on a vast field from a high altitude; the field appears to be neatly striped and golden in color. The camera moves forward to show a full view of the field. 
The left edge of the field has a green strip of shrubs, and next to the strip of shrubs is another green field of farmland, which is divided into several neat squares.", "visual_elements": [ { "content": "The camera looks down on a vast field from a high altitude.", "type": "camera", "weight": 3 }, { "content": "The field appears to be neatly striped.", "type": "attribute", "weight": 2 }, { "content": "The field is golden.", "type": "attribute", "weight": 2 }, { "content": "The camera moves forward to show a full view of the field.", "type": "camera", "weight": 3 }, { "content": "The left edge of the field has a green strip of shrubs.", "type": "attribute", "weight": 2 }, { "content": "Next to the strip of shrubs is another green field of farmland.", "type": "attribute", "weight": 2 }, { "content": "The farmland is divided into several neat squares.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera moves, two square haystacks appear in the upper right corner of the field. The camera continues to move forward and more haystacks gradually come into view.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "Two square haystacks appear in the upper right corner of the field.", "type": "scene", "weight": 3 }, { "content": "More haystacks gradually come into view.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera continues to move to the right, a cluster of buildings appears at the right edge of the field, surrounded by green shrubs. At the end of the video, the camera moves slowly upwards to show the entire farm and surrounding fields, with the view gradually becoming clearer in the distance.", "visual_elements": [ { "content": "The camera continues to move to the right.", "type": "camera", "weight": 3 }, { "content": "A cluster of buildings appears at the right edge of the field.", "type": "scene", "weight": 3 }, { "content": "The buildings are surrounded by green shrubs.", "type": "attribute", "weight": 2 }, { "content": "There are lots of piles of debris next to the buildings.", "type": "attribute", "weight": 2 }, { "content": "The camera moves slowly upwards to show the entire farm and surrounding fields.", "type": "camera", "weight": 3 }, { "content": "The surrounding fields and buildings are visible.", "type": "scene", "weight": 2 }, { "content": "The camera moves slowly upward.", "type": "camera", "weight": 3 }, { "content": "The camera shows the entire farm and surrounding fields.", "type": "scene", "weight": 2 }, { "content": "The view gradually becomes clearer in the distance.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0890", "video_path": "MiraData/video_clips/000005049/000005049054.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 22.12, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera looking down from a high altitude on a golden colored field that is separated into two parts by a green hedge. The camera remains stationary as the hedge casts long shadows across the field. As time passes, a tractor gradually appears in the top right hand corner of the frame with a long trailer being towed behind it.\nThe camera remains in an overhead view as the tractor slowly moves toward the center of the frame. 
The tractor continues to move forward and the outline of the trailer, which is loaded with neat bales of hay, gradually becomes clear.", "events": [ { "event": "The video begins with the camera looking down from a high altitude on a golden colored field that is separated into two parts by a green hedge. The camera remains stationary as the hedge casts long shadows across the field. As time passes, a tractor gradually appears in the top right hand corner of the frame with a long trailer being towed behind it.", "visual_elements": [ { "content": "The camera looks down from a high altitude on a golden colored field.", "type": "camera", "weight": 3 }, { "content": "The field is separated into two parts by a green hedge.", "type": "attribute", "weight": 2 }, { "content": "The camera remains stationary.", "type": "camera", "weight": 3 }, { "content": "The hedge casts long shadows across the field.", "type": "scene", "weight": 2 }, { "content": "As time passes, a tractor gradually appears in the top right hand corner of the frame.", "type": "attribute", "weight": 2 }, { "content": "A long trailer is being towed behind the tractor.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera remains in an overhead view as the tractor slowly moves toward the center of the frame. The tractor continues to move forward and the outline of the trailer, which is loaded with neat bales of hay, gradually becomes clear.", "visual_elements": [ { "content": "The camera remains in an overhead view.", "type": "camera", "weight": 3 }, { "content": "The tractor slowly moves toward the center of the frame.", "type": "scene", "weight": 2 }, { "content": "The tractor continues to move forward.", "type": "attribute", "weight": 2 }, { "content": "The silhouette of the trailer is becoming clearer.", "type": "attribute", "weight": 2 }, { "content": "The trailer is loaded with neat bales of hay.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 11 }, { "index": "TUNA_0891", "video_path": "MiraData/video_clips/000005049/000005049066.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 29.92, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a vast field covered in a golden crop. There is a tall tree in the center of the frame, woods and some low bushes in the distance, and a clear sky.\nThe camera begins to move slowly to the left and the tree gradually moves from the center of the frame to the right. The full extent of the field is gradually revealed, with some post-harvest marks on the field and distinct rut marks on the ground.", "events": [ { "event": "The video begins with the camera focusing on a vast field covered in a golden crop. 
There is a tall tree in the center of the frame, woods and some low bushes in the distance, and a clear sky.", "visual_elements": [ { "content": "The video begins with the camera focusing on a vast field.", "type": "camera", "weight": 3 }, { "content": "The field is covered in a golden crop.", "type": "scene", "weight": 3 }, { "content": "There is a tall tree in the center of the frame.", "type": "attribute", "weight": 2 }, { "content": "There are woods and some low bushes in the distance.", "type": "attribute", "weight": 2 }, { "content": "The sky is clear.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera begins to move slowly to the left and the tree gradually moves from the center of the frame to the right. The full extent of the field is gradually revealed, with some post-harvest marks on the field and distinct rut marks on the ground.", "visual_elements": [ { "content": "The camera begins to move slowly to the left.", "type": "camera", "weight": 3 }, { "content": "The tree gradually moves from the center of the frame to the right.", "type": "attribute", "weight": 2 }, { "content": "There are obvious ruts in the ground.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0892", "video_path": "MiraData/video_clips/000005049/000005049904.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 20.55, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a distant snow-covered mountain with a snow-covered summit, a grayish-white mountain, and a blue sky dotted with a few white clouds. In front of the snow-covered mountain are rolling hills, and in front of the hills is a barren mountain.\nThe camera moves backward and the view opens up more and more, showing more mountain ranges and forested landscapes. The layers of the mountains gradually emerge, with barren slopes in the near distance and towering peaks in the far distance, and vegetation gradually increases on the slopes, with green trees and shrubs covering most of the mountain.", "events": [ { "event": "The video begins with the camera focusing on a distant snow-covered mountain with a snow-covered summit, a grayish-white mountain, and a blue sky dotted with a few white clouds. In front of the snow-covered mountain are rolling hills, and in front of the hills is a barren mountain.", "visual_elements": [ { "content": "The camera focuses on a distant snow-covered mountain.", "type": "camera", "weight": 3 }, { "content": "The top of the mountain is covered with snow.", "type": "scene", "weight": 2 }, { "content": "The mountain appears grayish-white.", "type": "attribute", "weight": 2 }, { "content": "There are a few white clouds in the blue sky.", "type": "attribute", "weight": 2 }, { "content": "In front of the snow-covered mountain is an endless mountain range.", "type": "attribute", "weight": 2 }, { "content": "In front of the mountains is a barren mountain.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves backward and the view opens up more and more, showing more mountain ranges and forested landscapes. 
The layers of the mountains gradually emerge, with barren slopes in the near distance and towering peaks in the far distance, and vegetation gradually increases on the slopes, with green trees and shrubs covering most of the mountain.", "visual_elements": [ { "content": "As the camera moves back, the view opens up.", "type": "camera", "weight": 3 }, { "content": "More mountains and forests are revealed.", "type": "scene", "weight": 3 }, { "content": "The layers of the mountain range gradually appear.", "type": "attribute", "weight": 3 }, { "content": "There are barren slopes in the near distance and towering peaks in the far distance.", "type": "attribute", "weight": 2 }, { "content": "Vegetation on the slopes gradually increases.", "type": "attribute", "weight": 2 }, { "content": "Green trees and shrubs cover most of the mountain.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0893", "video_path": "MiraData/video_clips/000005057/000005057595.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 24.24, "resolution": { "width": 1280, "height": 720 }, "caption": "The video was filmed at night. At the beginning of the video, the camera focuses on the front of a house. The main door of the house is dark-colored, with two large white labels hanging on it. To the left of the door, there is a rectangular glass window, and a lit lamp hangs from the ceiling in front of the door. On the right side of the house, there are two adjacent large windows, with a row of green underground lights shining beneath them.\nA red double-decker bus approaches from the distance, with the wording \"JR Hospital 13\" displayed above the front, indicating that it is bus number 13 bound for the hospital. As the bus draws closer, it can be seen that the driver in the driver's seat is a blonde woman. The bus's license plate number is displayed as \"MI OXF.\" The red bus passes through the scene and exits the scene.\nBehind the red bus, a purple double-decker bus also approaches from the distance, with the wording \"Oxford City 4A\" displayed above the front, indicating that it is bus number 4A bound for Oxford City. The purple bus's license plate number is \"HB11 OXF.\" The purple bus passes through the scene and exits the scene. There are no other vehicles on the empty street, and the white buildings and dead trees in the background remain clearly visible.", "events": [ { "event": "The video was filmed at night. At the beginning of the video, the camera focuses on the front of a house. The main door of the house is dark-colored, with two large white labels hanging on it. To the left of the door, there is a rectangular glass window, and a lit lamp hangs from the ceiling in front of the door. 
On the right side of the house, there are two adjacent large windows, with a row of green underground lights shining beneath them.", "visual_elements": [ { "content": "The camera focuses on a street.", "type": "camera", "weight": 3 }, { "content": "On the left side of the street is a worn-out stone wall.", "type": "attribute", "weight": 2 }, { "content": "Above the wall is a circle of fencing.", "type": "attribute", "weight": 1 }, { "content": "Inside the fencing stands a large green mountain.", "type": "attribute", "weight": 2 }, { "content": "Neatly arranged streetlights line the left side of the street.", "type": "attribute", "weight": 2 }, { "content": "There are a few white clouds in the sky.", "type": "attribute", "weight": 2 }, { "content": "It's sunny.", "type": "scene", "weight": 2 }, { "content": "Sparse trees stand on both sides of the street in front of the stone wall.", "type": "scene", "weight": 2 }, { "content": "The camera is fixed on the right side of the street.", "type": "camera", "weight": 3 }, { "content": "It captures people or objects in the scene from a low angle.", "type": "camera", "weight": 3 } ] }, { "event": "A red double-decker bus approaches from the distance, with the wording \"JR Hospital 13\" displayed above the front, indicating that it is bus number 13 bound for the hospital. As the bus draws closer, it can be seen that the driver in the driver's seat is a blonde woman. The bus's license plate number is displayed as \"MI OXF.\" The red bus passes through the scene and exits the scene.", "visual_elements": [ { "content": "A red double-decker bus approaches from the distance.", "type": "action", "weight": 3 }, { "content": "Above the front of the bus is the wording \"JR Hospital 13,\" indicating that it is bus number 13 bound for the hospital.", "type": "attribute", "weight": 2 }, { "content": "As the bus draws closer, it can be seen that the driver in the driver's seat is a blonde woman.", "type": "attribute", "weight": 2 }, { "content": "The bus's license plate number is displayed as \"MI OXF.\"", "type": "attribute", "weight": 2 }, { "content": "The red bus passes through the scene and exits the scene.", "type": "action", "weight": 3 } ] }, { "event": "Behind the red bus, a purple double-decker bus also approaches from the distance, with the wording \"Oxford City 4A\" displayed above the front, indicating that it is bus number 4A bound for Oxford City. The purple bus's license plate number is \"HB11 OXF.\" The purple bus passes through the scene and exits the scene. 
There are no other vehicles on the empty street, and the white buildings and dead trees in the background remain clearly visible.", "visual_elements": [ { "content": "Behind the red bus, a purple double-decker bus also approaches from the distance.", "type": "action", "weight": 3 }, { "content": "Above the front of the bus is the wording \"Oxford City 4A,\" indicating that it is bus number 4A bound for Oxford City.", "type": "attribute", "weight": 2 }, { "content": "The purple bus's license plate number is \"HB11 OXF.\"", "type": "attribute", "weight": 2 }, { "content": "The purple bus passes through the scene and exits the scene.", "type": "action", "weight": 3 }, { "content": "There are no other vehicles on the empty street.", "type": "attribute", "weight": 2 }, { "content": "The white buildings and dead trees in the background remain clearly visible.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0894", "video_path": "MiraData/video_clips/000005057/000005057778.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 28.92, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on some green plant leaves, which take up most of the frame, with the background blurred.\nAs the camera moves to the right, the background gradually becomes clear, and a square with many bicycles parked appears in the picture. The ground of the square is paved with yellow and gray slabs. There are several bare trees with thin trunks around.\nThen, two men in black jackets and grey pants enter the frame from the right. The man closer to the camera holds a blue container, and the other man further away wears a black hat and looks down at his phone. The two men walk out of the frame toward its front left, and the camera stops moving. In the background, the building on the right side of the frame is black and white. There is a red building with white columns on the left side of the frame.", "events": [ { "event": "The video begins with the camera focused on some green plant leaves, which take up most of the frame, with the background blurred.", "visual_elements": [ { "content": "The camera is focused on some green leaves.", "type": "camera", "weight": 3 } ] }, { "event": "As the camera moves to the right, the background gradually becomes clear, and a square with many bicycles parked appears in the picture. The ground of the square is paved with yellow and gray slabs. There are several bare trees with thin trunks around.", "visual_elements": [ { "content": "As the camera moves to the right, the background gradually becomes clear.", "type": "camera", "weight": 3 }, { "content": "The first thing that appears in the picture is a square with many bicycles parked.", "type": "attribute", "weight": 3 }, { "content": "The ground of the square is paved with yellow and gray slabs.", "type": "attribute", "weight": 2 }, { "content": "There are several bare trees with thin trunks in the square.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, two men in black jackets and grey pants enter the frame from the right. The man closer to the camera holds a blue container, and the other man further away wears a black hat and looks down at his phone. The two men walk out of the frame toward its front left, and the camera stops moving. In the background, the building on the right side of the frame is black and white. 
There is a red building with white columns on the left side of the frame.", "visual_elements": [ { "content": "Then, two men wearing black jackets and gray pants enter the picture from the right side.", "type": "attribute", "weight": 3 }, { "content": "The man close to the camera holds a blue container in his hand.", "type": "attribute", "weight": 1 }, { "content": "The other man, slightly farther from the camera, wears a black hat and looks down at his mobile phone.", "type": "attribute", "weight": 1 }, { "content": "The two men walk side by side to the left front of the picture, and the camera stops moving.", "type": "action", "weight": 3 }, { "content": "In the background, the building on the right side of the picture is black and white.", "type": "scene", "weight": 2 }, { "content": "There is a red building with white columns on the left side of the picture.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 11 }, { "index": "TUNA_0895", "video_path": "MiraData/video_clips/000005057/000005057833.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 23.0, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a street, with an ancient building in the background. The exterior walls of the building are made of light yellow stone, and the windows are large and symmetrically arranged. There are several people moving around in front of the building. A woman riding a bicycle enters the scene from the right side, wearing a khaki coat, black pants, a black hat, and carrying a black backpack.\nNext, the camera pans to the left, revealing a three-way intersection ahead. The woman continues riding her bicycle, and the ground on the left side of the street is paved with stone bricks. To the left of the stone brick path, there is a staircase leading to a beige building in the rear. A woman dressed in a red down jacket, carrying an orange shopping bag, is walking along the stone brick path with her back to the camera. In front of and to the left of her, there is a man wearing a mask, a dark jacket, and blue jeans. He is walking towards the camera and puts his right hand into his jacket pocket as he walks. In the background, the woman riding the bicycle turns right. To the left of the turn, there is a light pink building. At the turn, a man is running in the opposite direction of the woman on the bicycle.", "events": [ { "event": "At the beginning of the video, the camera focuses on a street, with an ancient building in the background. The exterior walls of the building are made of light yellow stone, and the windows are large and symmetrically arranged. There are several people moving around in front of the building. 
A woman riding a bicycle enters the scene from the right side, wearing a khaki coat, black pants, a black hat, and carrying a black backpack.", "visual_elements": [ { "content": "The camera focuses on a street.", "type": "camera", "weight": 3 }, { "content": "The background is an ancient building.", "type": "scene", "weight": 2 }, { "content": "The exterior walls of the building are made of light yellow stone.", "type": "attribute", "weight": 2 }, { "content": "The windows are large and symmetrically arranged.", "type": "attribute", "weight": 2 }, { "content": "There are several people moving in front of the building.", "type": "action", "weight": 2 }, { "content": "A woman riding a bicycle enters the scene from the right.", "type": "action", "weight": 3 }, { "content": "The woman wears a khaki coat and black pants, a black hat, and carries a black backpack.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera pans to the left, revealing a three-way intersection ahead. The woman continues riding her bicycle, and the ground on the left side of the street is paved with stone bricks. To the left of the stone brick path, there is a staircase leading to a beige building in the rear. A woman dressed in a red down jacket, carrying an orange shopping bag, is walking along the stone brick path with her back to the camera. In front of and to the left of her, there is a man wearing a mask, a dark jacket, and blue jeans. He is walking towards the camera and puts his right hand into his jacket pocket as he walks. In the background, the woman riding the bicycle turns right. To the left of the turn, there is a light pink building. At the turn, a man is running in the opposite direction of the woman on the bicycle.", "visual_elements": [ { "content": "The camera moves to the left.", "type": "camera", "weight": 3 }, { "content": "A three-way intersection appears ahead.", "type": "attribute", "weight": 2 }, { "content": "The woman continues riding her bicycle forward.", "type": "action", "weight": 2 }, { "content": "The ground on the left side of the street is paved with stone bricks.", "type": "attribute", "weight": 2 }, { "content": "To the left of the stone brick path is a flight of stairs.", "type": "attribute", "weight": 2 }, { "content": "The stairs lead to a beige building behind.", "type": "attribute", "weight": 2 }, { "content": "A woman facing away from the camera wears a red down jacket and carries an orange shopping bag.", "type": "attribute", "weight": 3 }, { "content": "The woman walks along the stone brick path.", "type": "action", "weight": 3 }, { "content": "In front of the woman on the left is a man wearing a mask, a dark jacket, and blue jeans.", "type": "attribute", "weight": 3 }, { "content": "The man walks in the direction of the camera.", "type": "action", "weight": 3 }, { "content": "As he walks, the man puts his right hand into the pocket of his jacket.", "type": "action", "weight": 3 }, { "content": "In the background, the woman riding a bicycle turns right.", "type": "scene", "weight": 2 }, { "content": "To the left of the turn is a light pink building.", "type": "scene", "weight": 2 }, { "content": "At the turn, a man is running.", "type": "action", "weight": 2 }, { "content": "The direction of the man's running is opposite to the movement of the bicycle.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 22 }, { "index": "TUNA_0896", "video_path": "MiraData/video_clips/000005057/000005057927.0.mp4", "video_source": "MiraData", 
"visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 20.0, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of another video sequence, the camera focuses on a city street, with two red double-decker buses parked on the side of the road in the background. The streets are lined with ancient buildings of elegant architecture. On the right side of the scene, there are several areas enclosed by iron fences, containing trees and bicycle racks. A man wearing a dark hoodie and light jeans stands next to the iron fence on the right side of the scene, wearing a hat, slightly bent over, and holding a red skateboard.\nThe man lays down the skateboard and steps onto it with both feet. At this moment, a woman riding a bicycle enters from the right side of the scene, wearing a dark coat and jeans, with a black backpack on her back. She rides her bicycle along the street, gradually moving away from the camera. On the sidewalk on the left side of the scene, a pedestrian is walking away from the camera, with parts of their figure occasionally appearing on the left side of the scene. On the right side of the scene, the man rides his skateboard and performs some simple skateboarding tricks.\nA bus enters the scene from the right side, gradually obscuring the man's figure. The bus has advertisements on its body and a license plate number of SN63 MXZ. After the bus passes, the man's figure reappears in the scene, walking while looking down at his skateboard, which is placed on the ground with its wheels facing up. The full figure of the pedestrian on the left side of the scene is now visible.", "events": [ { "event": "At the beginning of another video sequence, the camera focuses on a city street, with two red double-decker buses parked on the side of the road in the background. The streets are lined with ancient buildings of elegant architecture. On the right side of the scene, there are several areas enclosed by iron fences, containing trees and bicycle racks. A man wearing a dark hoodie and light jeans stands next to the iron fence on the right side of the scene, wearing a hat, slightly bent over, and holding a red skateboard.", "visual_elements": [ { "content": "The camera focuses on a city street.", "type": "camera", "weight": 3 }, { "content": "In the background, two red double-decker buses are parked by the roadside.", "type": "scene", "weight": 2 }, { "content": "Ancient buildings line both sides of the street.", "type": "scene", "weight": 2 }, { "content": "On the right side of the scene are areas enclosed by iron fences.", "type": "attribute", "weight": 2 }, { "content": "Inside the fences are some trees and bicycle racks.", "type": "attribute", "weight": 1 }, { "content": "A man wearing a dark hoodie and light jeans stands next to an iron fence on the right side of the scene.", "type": "attribute", "weight": 3 }, { "content": "He wears a hat, bends slightly, and holds a red skateboard with his hand.", "type": "attribute", "weight": 3 } ] }, { "event": "The man lays down the skateboard and steps onto it with both feet. At this moment, a woman riding a bicycle enters from the right side of the scene, wearing a dark coat and jeans, with a black backpack on her back. She rides her bicycle along the street, gradually moving away from the camera. On the sidewalk on the left side of the scene, a pedestrian is walking away from the camera, with parts of their figure occasionally appearing on the left side of the scene. 
On the right side of the scene, the man rides his skateboard and performs some simple skateboarding tricks.", "visual_elements": [ { "content": "The man lays down the skateboard and steps on it with both feet.", "type": "action", "weight": 3 }, { "content": "A woman riding a bicycle enters from the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The woman wears a dark coat and jeans, carrying a black backpack.", "type": "attribute", "weight": 2 }, { "content": "The woman rides her bicycle forward along the street, gradually moving away from the camera.", "type": "action", "weight": 3 }, { "content": "On the sidewalk on the left side of the scene, a pedestrian walks forward facing away from the camera.", "type": "attribute", "weight": 2 }, { "content": "Parts of the pedestrian's figure occasionally appear on the left side of the scene.", "type": "attribute", "weight": 1 }, { "content": "On the right side of the scene, the man rides his skateboard and performs some simple skateboarding tricks.", "type": "attribute", "weight": 2 } ] }, { "event": "A bus enters the scene from the right side, gradually obscuring the man's figure. The bus has advertisements on its body and a license plate number of SN63 MXZ. After the bus passes, the man's figure reappears in the scene, walking while looking down at his skateboard, which is placed on the ground with its wheels facing up. The full figure of the pedestrian on the left side of the scene is now visible.", "visual_elements": [ { "content": "A bus enters the scene from the right side.", "type": "action", "weight": 3 }, { "content": "The bus gradually obscures the man's figure.", "type": "attribute", "weight": 2 }, { "content": "Advertisements are displayed on the bus body.", "type": "attribute", "weight": 2 }, { "content": "The bus license plate number is SN63 MXZ.", "type": "attribute", "weight": 2 }, { "content": "After the bus passes, the man's figure reappears in the scene.", "type": "attribute", "weight": 2 }, { "content": "The man walks while looking down at his skateboard.", "type": "action", "weight": 2 }, { "content": "The skateboard is placed on the ground with its wheels facing up.", "type": "attribute", "weight": 2 }, { "content": "The full figure of the pedestrian on the left side of the scene appears in the scene.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0897", "video_path": "MiraData/video_clips/000005059/000005059453.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 22.2, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a lush, tree-filled area surrounded by iron fences. In the background, there stands a gray building.\nThe camera pans to the right, with the iron fence consistently in the foreground, while the building in the background gradually becomes clearer. The windows of the building are white. The door on the first floor is red, with white graffiti on it.\nAs the camera continues panning to the right, the trunks of three trees outside the fence gradually appear in the foreground. There are some fallen leaves on the ground in front of the building. On the first floor of the building, there is a large white window, with a large green potted plant below it.\nAt the end of the video, the camera keeps panning to the right, and the iron fence completely moves out of the scene. There is a low fence in front of the building. 
A gray bicycle handle appears in the bottom right corner of the scene, and there are some green plants next to the windows on the ground floor of the building.", "events": [ { "event": "At the beginning of the video, the camera focuses on a lush, tree-filled area surrounded by iron fences. In the background, there stands a gray building.", "visual_elements": [ { "content": "The camera focuses on a lush, tree-filled area.", "type": "camera", "weight": 3 }, { "content": "This area is surrounded by iron fences.", "type": "attribute", "weight": 2 }, { "content": "In the background, there stands a gray building.", "type": "scene", "weight": 1 } ] }, { "event": "The camera pans to the right, with the iron fence consistently in the foreground, while the building in the background gradually becomes clearer. The windows of the building are white. The door on the first floor is red, with white graffiti on it.", "visual_elements": [ { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "The iron fence is consistently in the foreground.", "type": "attribute", "weight": 2 }, { "content": "The building in the background gradually becomes clearer.", "type": "scene", "weight": 1 }, { "content": "The windows of the building are white.", "type": "attribute", "weight": 2 }, { "content": "The door on the first floor is red.", "type": "attribute", "weight": 2 }, { "content": "There is white graffiti on the door.", "type": "attribute", "weight": 1 } ] }, { "event": "As the camera continues panning to the right, the trunks of three trees outside the fence gradually appear in the foreground. There are some fallen leaves on the ground in front of the building. On the first floor of the building, there is a large white window, with a large green potted plant below it.", "visual_elements": [ { "content": "The camera continues panning to the right.", "type": "camera", "weight": 3 }, { "content": "The trunks of three trees outside the fence gradually appear in the foreground.", "type": "attribute", "weight": 2 }, { "content": "There are some fallen leaves on the ground in front of the building.", "type": "scene", "weight": 1 }, { "content": "There is a large white window on the first floor of the building.", "type": "attribute", "weight": 2 }, { "content": "There is a large green potted plant below the window.", "type": "attribute", "weight": 1 } ] }, { "event": "At the end of the video, the camera keeps panning to the right, and the iron fence completely moves out of the scene. There is a low fence in front of the building. 
A gray bicycle handle appears in the bottom right corner of the scene, and there are some green plants next to the windows on the ground floor of the building.", "visual_elements": [ { "content": "The camera continues panning to the right.", "type": "camera", "weight": 3 }, { "content": "The iron fence completely moves out of the scene.", "type": "attribute", "weight": 2 }, { "content": "There is a low fence in front of the building.", "type": "attribute", "weight": 1 }, { "content": "A gray bicycle handle appears in the bottom right corner of the scene.", "type": "attribute", "weight": 1 }, { "content": "There are some green plants next to the windows on the ground floor of the building.", "type": "attribute", "weight": 1 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0898", "video_path": "MiraData/video_clips/000005059/000005059934.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 22.68, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the ground floor of a multi-story building. The exterior walls of the building are constructed with gray bricks, the ceiling is white, and the windows are neatly arranged. The camera slowly moves to the left, revealing multiple satellite antennas installed on the rooftop of the building. In front of the building, there are several trees with green foliage partially obscuring the structure.\nThe camera continues panning to the left, revealing a gray high-rise building behind the first building. The white windows of the high-rise are densely and neatly arranged. To the left of the scene is a dense forest of trees.\nAs the camera continues to pan to the left, a black pole enters the scene from the left and exits to the right. Beneath the dense trees, there is an ornate fence. As the camera moves to the right, a partial wall constructed with stone bricks, connected to the end of the fence, enters the scene from the left.", "events": [ { "event": "At the beginning of the video, the camera focuses on the ground floor of a multi-story building. The exterior walls of the building are constructed with gray bricks, the ceiling is white, and the windows are neatly arranged. The camera slowly moves to the left, revealing multiple satellite antennas installed on the rooftop of the building. In front of the building, there are several trees with green foliage partially obscuring the structure.", "visual_elements": [ { "content": "The camera focuses on the ground floor of a multi-story building.", "type": "camera", "weight": 3 }, { "content": "The exterior walls of the building are constructed with gray bricks.", "type": "attribute", "weight": 2 }, { "content": "The ceiling is white.", "type": "attribute", "weight": 2 }, { "content": "The windows are neatly arranged.", "type": "attribute", "weight": 2 }, { "content": "The camera slowly moves to the left.", "type": "camera", "weight": 3 }, { "content": "Multiple satellite antennas are installed on the rooftop of the building.", "type": "attribute", "weight": 2 }, { "content": "There are several trees in front of the building.", "type": "attribute", "weight": 2 }, { "content": "Green foliage obscures parts of the building.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues panning to the left, revealing a gray high-rise building behind the first building. The white windows of the high-rise are densely and neatly arranged. 
To the left of the scene is a dense forest of trees.", "visual_elements": [ { "content": "The camera continues panning to the left.", "type": "camera", "weight": 3 }, { "content": "Behind the building stands a gray high-rise tower.", "type": "attribute", "weight": 2 }, { "content": "The white windows of the tower are densely and neatly arranged.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the scene is a dense forest.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera continues to pan to the left, a black pole enters the scene from the left and exits to the right. Beneath the dense trees, there is an ornate fence. As the camera moves to the right, a partial wall constructed with stone bricks, connected to the end of the fence, enters the scene from the left.", "visual_elements": [ { "content": "The camera continues moving to the left.", "type": "camera", "weight": 3 }, { "content": "A black pole enters from the left side of the scene and exits on the right.", "type": "attribute", "weight": 3 }, { "content": "Beneath the dense trees, there is a rusted fence.", "type": "attribute", "weight": 2 }, { "content": "As the camera pans to the right, a partial wall made of stone bricks, connected to the end of the fence, enters the scene from the left.", "type": "attribute", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0899", "video_path": "MiraData/video_clips/000005060/000005060095.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 25.36, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an ancient building. The walls of the building are constructed from light-colored stone, and there are two vertically arranged windows on the wall, fitted with black iron gratings. The camera slowly pans to the right, revealing the full view of a large window comprised of four smaller windows. There are some weathered marks on the wall beside the windows.\nThe camera continues to pan to the right, and a broad road appears in the scene. At the end of the road stands a magnificent building with an archway. On the right side of the road, a person dressed in red walks away from the camera, leading a dog. Behind them stands another person with their back to the camera. On the left side of the road, two individuals dressed in heavy clothing and wearing hats have their hands in their pockets, heads down, walking towards the camera. Behind them is a black gated door, with its right half open. A male is walking towards the area behind the door. To the left-rear of the door stands a green tree.\nThe camera pans upwards and to the right, and the scene of the road exits the scene. The perspective shifts to the upper half of the magnificent building. The upper section features three adjacent large arched windows with cut-out designs inside.", "events": [ { "event": "At the beginning of the video, the camera focuses on an ancient building. The walls of the building are constructed from light-colored stone, and there are two vertically arranged windows on the wall, fitted with black iron gratings. The camera slowly pans to the right, revealing the full view of a large window comprised of four smaller windows. 
There are some weathered marks on the wall beside the windows.", "visual_elements": [ { "content": "The camera focuses on an ancient building.", "type": "camera", "weight": 3 }, { "content": "The walls of the building are constructed from light-colored stone.", "type": "attribute", "weight": 2 }, { "content": "There are two vertically arranged windows on the wall.", "type": "attribute", "weight": 2 }, { "content": "The windows are fitted with black iron gratings.", "type": "attribute", "weight": 2 }, { "content": "The camera slowly pans to the right.", "type": "camera", "weight": 3 }, { "content": "The full view of a large window comprised of four smaller windows appears in the scene.", "type": "attribute", "weight": 3 }, { "content": "There are some weathered marks on the wall beside the windows.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera continues to pan to the right, and a broad road appears in the scene. At the end of the road stands a magnificent building with an archway. On the right side of the road, a person dressed in red walks away from the camera, leading a dog. Behind them stands another person with their back to the camera. On the left side of the road, two individuals dressed in heavy clothing and wearing hats have their hands in their pockets, heads down, walking towards the camera. Behind them is a black gated door, with its right half open. A male is walking towards the area behind the door. To the left-rear of the door stands a green tree.", "visual_elements": [ { "content": "The camera continues to pan to the right.", "type": "camera", "weight": 3 }, { "content": "A broad road appears in the scene.", "type": "scene", "weight": 3 }, { "content": "At the end of the road stands a magnificent building.", "type": "attribute", "weight": 2 }, { "content": "This building has an archway.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the road, a person dressed in red walks away from the camera.", "type": "attribute", "weight": 2 }, { "content": "This person is leading a dog as they walk forward.", "type": "action", "weight": 2 }, { "content": "Behind the person dressed in red stands another person with their back to the camera.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road, two individuals are dressed in heavy clothing and wearing hats.", "type": "attribute", "weight": 2 }, { "content": "They have their hands in their pockets, heads down, walking towards the camera.", "type": "action", "weight": 2 }, { "content": "Behind them is a black gated door.", "type": "attribute", "weight": 2 }, { "content": "The right half of the door is open.", "type": "attribute", "weight": 1 }, { "content": "Next to the door, a male is walking towards the area behind it.", "type": "action", "weight": 2 }, { "content": "To the left-rear of the door stands a green tree.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans upwards and to the right, and the scene of the road exits the scene. The perspective shifts to the upper half of the magnificent building. 
The upper section features three adjacent large arched windows with cut-out designs inside.", "visual_elements": [ { "content": "The camera pans upwards and to the right.", "type": "camera", "weight": 3 }, { "content": "The view of the road exits the scene.", "type": "attribute", "weight": 2 }, { "content": "The perspective shifts to the upper half of the magnificent building.", "type": "camera", "weight": 3 }, { "content": "The upper part features three adjacent large arched windows.", "type": "attribute", "weight": 2 }, { "content": "The windows have cut-out designs inside.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 25 }, { "index": "TUNA_0900", "video_path": "MiraData/video_clips/000005060/000005060153.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 29.84, "resolution": { "width": 1280, "height": 720 }, "caption": "At the start of the video, the camera focuses on a modern building with a small stone wall at the entrance, featuring the sign \"Senedd Cymru Welsh Parliament\". The camera pans slowly from left to right, revealing the full view of the building. In front of the building, there is a row of stone steps, leading up to a long stone bench. The outer wall on the left side of the building is mainly made of glass.\nThe camera pans to the right, gradually bringing different parts of the building into view. On the left side of the scene, there is a large glass curtain wall. The stone wall at the entrance and the glass building on the left are surrounded by metal railings. Behind the building, there are several red-brick buildings with many neatly arranged glass windows. To the right-rear of the red buildings, there are some green plants and white buildings.\nAt the end of the video, the camera stops at the right side of the building. Throughout the video, the camera pans smoothly, showcasing various angles and details of the building.", "events": [ { "event": "At the start of the video, the camera focuses on a modern building with a small stone wall at the entrance, featuring the sign \"Senedd Cymru Welsh Parliament\". The camera pans slowly from left to right, revealing the full view of the building. In front of the building, there is a row of stone steps, leading up to a long stone bench. The outer wall on the left side of the building is mainly made of glass.", "visual_elements": [ { "content": "The camera focuses on a modern building.", "type": "camera", "weight": 3 }, { "content": "There is a small stone wall in front of the building's entrance.", "type": "attribute", "weight": 2 }, { "content": "On the stone wall, there is a sign that reads \"Senedd Cymru Welsh Parliament\".", "type": "attribute", "weight": 2 }, { "content": "The camera slowly pans from left to right.", "type": "camera", "weight": 3 }, { "content": "There is a row of stone steps in front of the building.", "type": "scene", "weight": 2 }, { "content": "Above the steps, there is a long stone bench.", "type": "attribute", "weight": 2 }, { "content": "The outer wall on the left side of the building is mainly made of glass.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans to the right, gradually bringing different parts of the building into view. On the left side of the scene, there is a large glass curtain wall. The stone wall at the entrance and the glass building on the left are surrounded by metal railings. 
Behind the building, there are several red-brick buildings with many neatly arranged glass windows. To the right-rear of the red buildings, there are some green plants and white buildings.", "visual_elements": [ { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "On the left side of the scene, there is a large glass curtain wall.", "type": "attribute", "weight": 2 }, { "content": "The stone wall and the glass building on the left are surrounded by metal railings.", "type": "attribute", "weight": 1 }, { "content": "Behind the main building, there are several red-brick buildings.", "type": "scene", "weight": 2 }, { "content": "These buildings feature many neatly arranged glass windows.", "type": "attribute", "weight": 2 }, { "content": "To the right-rear of the red-brick buildings, there are some green plants and some white buildings.", "type": "attribute", "weight": 2 } ] }, { "event": "At the end of the video, the camera stops at the right side of the building. Throughout the video, the camera pans smoothly, showcasing various angles and details of the building.", "visual_elements": [ { "content": "The camera stops at the right side of the building.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0901", "video_path": "MiraData/video_clips/000005060/000005060266.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 26.96, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera overlooks a high-rise building under construction from a high altitude. The main structure of the high-rise building is completed, but the exterior walls of the upper half and the overall interior decoration are unfinished. There is a yellow tower crane on the left side of the high-rise building, with its boom extending to the left. In the background, there are some low-rise buildings and green trees.\nThe camera slowly rotates to the right, shifting from the front of the building to its right side. In the background, several tall buildings appear, and the sky in the distance is hazy. A green safety net covers some areas at the bottom right of the building under construction.\nThe camera continues to rotate to the right, and a river appears in the upper left corner of the screen. Across the river, a cluster of high-rise and low-rise buildings are scattered.\nThe camera keeps rotating to the right and stops at the right side of the building under construction. The buildings on both sides of the river face each other, and the red roofs of some houses in the lower part of the screen add color to the video.", "events": [ { "event": "At the beginning of the video, the camera overlooks a high-rise building under construction from a high altitude. The main structure of the high-rise building is completed, but the exterior walls of the upper half and the overall interior decoration are unfinished. There is a yellow tower crane on the left side of the high-rise building, with its boom extending to the left. 
In the background, there are some low-rise buildings and green trees.", "visual_elements": [ { "content": "The camera overlooks a high-rise building under construction from a high altitude.", "type": "camera", "weight": 3 }, { "content": "The main structure of the high-rise building is completed.", "type": "attribute", "weight": 2 }, { "content": "The exterior walls of the upper half and the overall interior decoration are unfinished.", "type": "attribute", "weight": 1 }, { "content": "There is a yellow tower crane on the left side of the high-rise building.", "type": "attribute", "weight": 2 }, { "content": "The boom of the tower crane extends to the left.", "type": "attribute", "weight": 2 }, { "content": "In the background, there are some low-rise buildings and green trees.", "type": "scene", "weight": 2 } ] }, { "event": "The camera slowly rotates to the right, shifting from the front of the building to its right side. In the background, several tall buildings appear, and the sky in the distance is hazy. A green safety net covers some areas at the bottom right of the building under construction.", "visual_elements": [ { "content": "The camera slowly rotates to the right.", "type": "camera", "weight": 3 }, { "content": "The camera shifts from the front of the building to its right side.", "type": "camera", "weight": 3 }, { "content": "In the background, several tall buildings appear.", "type": "scene", "weight": 1 }, { "content": "The sky in the distance is hazy.", "type": "scene", "weight": 1 }, { "content": "A green safety net covers some areas at the bottom right of the building under construction.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to rotate to the right, and a river appears in the upper left corner of the screen. Across the river, a cluster of high-rise and low-rise buildings are scattered.", "visual_elements": [ { "content": "The camera continues to rotate to the right.", "type": "camera", "weight": 3 }, { "content": "A river appears in the upper left corner of the scene.", "type": "attribute", "weight": 2 }, { "content": "Across the river, a cluster of high-rise and low-rise buildings are scattered.", "type": "scene", "weight": 2 } ] }, { "event": "The camera keeps rotating to the right and stops at the right side of the building under construction. The buildings on both sides of the river face each other, and the red roofs of some houses in the lower part of the screen add color to the video.", "visual_elements": [ { "content": "The camera keeps rotating to the right.", "type": "camera", "weight": 3 }, { "content": "The scene stops at the right side of the building under construction.", "type": "camera", "weight": 3 }, { "content": "The buildings on both sides of the river face each other.", "type": "scene", "weight": 1 }, { "content": "The roofs of some houses in the lower part of the scene are red.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 18 }, { "index": "TUNA_0902", "video_path": "MiraData/video_clips/000005060/000005060800.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 26.56, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, a withered, massive tree trunk appears in the foreground, occupying the left side of the scene, while the right side of the scene features the enormous windows of a building. 
Some leafless, withered branches in front of the building obscure parts of the windows.\nThe camera moves to the right, and the tree trunk exits the scene. The large windows of the building are Gothic-styled, with exquisite carved decorations on the window frames. The walls of the building are made of light-colored stone, and there are some decorative spires and carvings on the rooftop next to the windows.\nThe camera continues to zoom in and pan upwards. The top of the large windows forms an arched structure. The top of the building features multiple spires and decorative railings, with crosses adorning the spires. The background is a clear blue sky.", "events": [ { "event": "At the beginning of the video, a withered, massive tree trunk appears in the foreground, occupying the left side of the scene, while the right side of the scene features the enormous windows of a building. Some leafless, withered branches in front of the building obscure parts of the windows.", "visual_elements": [ { "content": "A withered, massive tree trunk appears in the foreground, occupying the left side of the scene.", "type": "scene", "weight": 3 }, { "content": "The right side of the scene features the enormous windows of a building.", "type": "attribute", "weight": 3 }, { "content": "Some leafless, withered branches in front of the building obscure parts of the windows.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera moves to the right, and the tree trunk exits the scene. The large windows of the building are Gothic-styled, with exquisite carved decorations on the window frames. The walls of the building are made of light-colored stone, and there are some decorative spires and carvings on the rooftop next to the windows.", "visual_elements": [ { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "The tree trunk exits the scene.", "type": "attribute", "weight": 2 }, { "content": "The large windows of the building are Gothic-styled.", "type": "attribute", "weight": 2 }, { "content": "There are exquisite carved decorations on the window frames.", "type": "attribute", "weight": 1 }, { "content": "The walls of the building are made of light-colored stone.", "type": "attribute", "weight": 2 }, { "content": "There are some decorative spires and carvings on the rooftop next to the windows.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to zoom in and pan upwards. The top of the large windows forms an arched structure. The top of the building features multiple spires and decorative railings, with crosses adorning the spires. 
The background is a clear blue sky.", "visual_elements": [ { "content": "The camera continues to zoom in and pan upwards.", "type": "camera", "weight": 3 }, { "content": "The top of the large windows forms an arched structure.", "type": "attribute", "weight": 2 }, { "content": "The top of the building features multiple spires and decorative railings.", "type": "attribute", "weight": 2 }, { "content": "Crosses adorn the spires.", "type": "attribute", "weight": 1 }, { "content": "The background is a clear blue sky.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0903", "video_path": "MiraData/video_clips/000005060/000005060810.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 24.68, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a narrow alleyway, with tall brick wall buildings on both sides. A row of lights are installed on the outer wall of the building on the left. At the end of the alleyway, there is a river, and beside it, there are a red circular signboard and a white pentagonal signboard.\nThe camera pans forward a short distance and then moves up. Across the river, a modern multi-story building can be seen. The outer wall of the building is red, the roof is gray, and there are metal railings on the balconies. The distant background consists of trees and the sky.\nThe camera continues to move up, and the perspective gradually shifts from the middle of the building to the top. At this point, the end of the alleyway and the buildings across the river gradually disappear from the screen, replaced by the upper structures of the buildings on both sides and the sky. The sky is light blue with almost no clouds.\nThe camera finally stops at a high angle, with the perspective completely focused on the sky. The buildings and trees in the background gradually disappear as the camera pans up, leaving only the sky and part of the building's walls on the screen.", "events": [ { "event": "At the beginning of the video, the camera focuses on a narrow alleyway, with tall brick wall buildings on both sides. A row of lights are installed on the outer wall of the building on the left. At the end of the alleyway, there is a river, and beside it, there are a red circular signboard and a white pentagonal signboard.", "visual_elements": [ { "content": "The camera focuses on a narrow alleyway.", "type": "camera", "weight": 3 }, { "content": "Tall brick wall buildings stand on both sides of the alleyway.", "type": "attribute", "weight": 2 }, { "content": "A row of lights are installed on the outer wall of the building on the left.", "type": "attribute", "weight": 2 }, { "content": "At the end of the alleyway, there is a river.", "type": "attribute", "weight": 2 }, { "content": "Beside the river, there are a red circular signboard and a white pentagonal signboard.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera pans forward a short distance and then moves up. Across the river, a modern multi-story building can be seen. The outer wall of the building is red, the roof is gray, and there are metal railings on the balconies. 
The distant background consists of trees and the sky.", "visual_elements": [ { "content": "The camera pans forward a short distance.", "type": "camera", "weight": 3 }, { "content": "The camera then moves up.", "type": "camera", "weight": 3 }, { "content": "Across the river, a modern multi-story building can be seen.", "type": "attribute", "weight": 2 }, { "content": "The outer wall of the building is red.", "type": "attribute", "weight": 1 }, { "content": "The roof is gray.", "type": "attribute", "weight": 1 }, { "content": "There are metal railings on the balconies.", "type": "attribute", "weight": 1 }, { "content": "The distant background consists of trees and the sky.", "type": "scene", "weight": 1 } ] }, { "event": "The camera continues to move up, and the perspective gradually shifts from the middle of the building to the top. At this point, the end of the alleyway and the buildings across the river gradually disappear from the screen, replaced by the upper structures of the buildings on both sides and the sky. The sky is light blue with almost no clouds.", "visual_elements": [ { "content": "The camera continues to pan up.", "type": "camera", "weight": 3 }, { "content": "The perspective gradually shifts from the middle of the building to the top.", "type": "camera", "weight": 3 }, { "content": "The end of the alleyway and the buildings across the river gradually disappear from the scene.", "type": "attribute", "weight": 2 }, { "content": "In the scene, there are the upper structures of the buildings on both sides and the sky.", "type": "attribute", "weight": 2 }, { "content": "The sky is light blue.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera finally stops at a high angle, with the perspective completely focused on the sky. The buildings and trees in the background gradually disappear as the camera pans up, leaving only the sky and part of the building's walls on the screen.", "visual_elements": [ { "content": "The camera finally stops at a high angle.", "type": "camera", "weight": 3 }, { "content": "The perspective is completely focused on the sky.", "type": "camera", "weight": 3 }, { "content": "Only the sky and part of the building's walls remain on the scene.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_0904", "video_path": "MiraData/video_clips/000005061/000005061290.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 26.2, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a narrow arched passageway, with stone and brick walls on both sides. To the left of the archway stands a neatly trimmed shrub, and to the right, a round flowerpot filled with green plants. The camera moves slowly forward, passing through the archway and entering the passageway. The floor of the passageway is paved with stone slabs, and a blue door is embedded on the left side. One side of the wall next to the door is painted white. At the end of the path, part of a large building can be seen.\nAs the camera pans forward, it also lifts upwards, leaving the passageway. A path enters the scene, with a garden area on the right side. The outer wall scene of the garden is mainly composed of red bricks, with wooden wall panels and black guardrails on it. Inside the wall is lush green vegetation. On the left side of the path is exposed vegetation. 
The outer wall of the building ahead has large Gothic windows, with exquisite stone carvings above the windows.\nThe camera continues to pan forward while lifting the shot to the left and upper side. The full view of the large building enters the scene. It is a magnificent Gothic cathedral, with tall spires and beautiful windows on its facade. The spires reach high into the sky. The sky is overcast.", "events": [ { "event": "At the beginning of the video, the camera focuses on a narrow arched passageway, with stone and brick walls on both sides. To the left of the archway stands a neatly trimmed shrub, and to the right, a round flowerpot filled with green plants. The camera moves slowly forward, passing through the archway and entering the passageway. The floor of the passageway is paved with stone slabs, and a blue door is embedded on the left side. One side of the wall next to the door is painted white. At the end of the path, part of a large building can be seen.", "visual_elements": [ { "content": "The shot focuses on a narrow arched passageway.", "type": "camera", "weight": 3 }, { "content": "The walls on both sides of the passageway are stone and brick.", "type": "attribute", "weight": 2 }, { "content": "To the left of the archway stands a neatly trimmed shrub.", "type": "attribute", "weight": 2 }, { "content": "To the right, a round flowerpot is filled with green plants.", "type": "attribute", "weight": 2 }, { "content": "The camera pans slowly forward, passing through the archway and entering the passageway.", "type": "camera", "weight": 3 }, { "content": "The floor of the passageway is paved with stone slabs.", "type": "attribute", "weight": 1 }, { "content": "A blue door is embedded on the left side of the passageway.", "type": "attribute", "weight": 2 }, { "content": "One side of the wall next to the door is painted white.", "type": "attribute", "weight": 2 }, { "content": "At the end of the path, part of a large building can be seen.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera pans forward, it also lifts upwards, leaving the passageway. A path enters the scene, with a garden area on the right side. The outer wall scene of the garden is mainly composed of red bricks, with wooden wall panels and black guardrails on it. Inside the wall is lush green vegetation. On the left side of the path is exposed vegetation. 
The outer wall of the building ahead has large Gothic windows, with exquisite stone carvings above the windows.", "visual_elements": [ { "content": "As the camera pans forward, it also lifts upwards, leaving the passageway.", "type": "camera", "weight": 3 }, { "content": "A path enters the scene.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the path is a garden area.", "type": "attribute", "weight": 2 }, { "content": "The outer wall scene of the garden is mainly composed of red bricks.", "type": "attribute", "weight": 2 }, { "content": "There are also wooden wall panels and black guardrails on the walls.", "type": "attribute", "weight": 2 }, { "content": "Inside the wall is lush green vegetation.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the path is exposed vegetation.", "type": "attribute", "weight": 2 }, { "content": "The outer wall of the building ahead has large Gothic windows.", "type": "attribute", "weight": 2 }, { "content": "With exquisite stone carvings above the windows.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera continues to pan forward while lifting the shot to the left and upper side. The full view of the large building enters the scene. It is a magnificent Gothic cathedral, with tall spires and beautiful windows on its facade. The spires reach high into the sky. The sky is overcast.", "visual_elements": [ { "content": "The camera continues to pan forward while lifting the shot to the left and upper side.", "type": "camera", "weight": 3 }, { "content": "The full view of a large building enters the scene.", "type": "attribute", "weight": 3 }, { "content": "This is a magnificent Gothic cathedral.", "type": "attribute", "weight": 2 }, { "content": "The front of the cathedral boasts tall spires and exquisite windows.", "type": "attribute", "weight": 2 }, { "content": "The spires reach high into the cloudy sky.", "type": "attribute", "weight": 1 }, { "content": "The sky is overcast.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 24 }, { "index": "TUNA_0905", "video_path": "MiraData/video_clips/000005061/000005061301.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 25.24, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the upper section of a tall building, with a circular platform atop the roof. In the center of the platform, there stands a pole stabilized by four antennas. The exterior wall of the building boasts a modern design of glass and metal, with neatly arranged windows. To the left of the building in the background, there are two spire-roofed structures, and the sky is bright with a few scattered white clouds.\nThe camera pans to the right, shifting the view from the front of the tall building to its right side. This wall of the building is painted in white and blue hues, and sunlight flickers through the hollows of the circular platform. In the back-right corner of the scene, there stands another spire-roofed building. The ground below the building is lined with white arcuate brackets, each adorned with numerous small arcuate forks.\nContinuing to pan right, the upper half of a large crane enters from the right side of the screen. Suspended from the top of the crane is a red hook, and its boom stretches high into the sky. 
The cityscape in the background is particularly beautiful under the setting sun, with distant mountain ranges visible in the distance. The scene halts as the camera reaches the right-rear corner of the tall building.", "events": [ { "event": "At the beginning of the video, the camera focuses on the upper section of a tall building, with a circular platform atop the roof. In the center of the platform, there stands a pole stabilized by four antennas. The exterior wall of the building boasts a modern design of glass and metal, with neatly arranged windows. To the left of the building in the background, there are two spire-roofed structures, and the sky is bright with a few scattered white clouds.", "visual_elements": [ { "content": "The camera focuses on the upper floors of a tall building.", "type": "camera", "weight": 3 }, { "content": "On the rooftop, there is a circular platform.", "type": "attribute", "weight": 3 }, { "content": "In the center of the platform, there stands a pillar.", "type": "attribute", "weight": 2 }, { "content": "The pillar is secured by four antennas.", "type": "attribute", "weight": 1 }, { "content": "The exterior walls of the building feature a modern combination of glass and metal structures.", "type": "attribute", "weight": 2 }, { "content": "The windows are neatly arranged.", "type": "attribute", "weight": 1 }, { "content": "To the left of the building in the background, there are two spire-topped structures.", "type": "scene", "weight": 1 }, { "content": "The sun is shining brightly, with a few scattered white clouds floating in the sky.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans to the right, shifting the view from the front of the tall building to its right side. This wall of the building is painted in white and blue hues, and sunlight flickers through the hollows of the circular platform. In the back-right corner of the scene, there stands another spire-roofed building. The ground below the building is lined with white arcuate brackets, each adorned with numerous small arcuate forks.", "visual_elements": [ { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "The perspective shifts from the front of the tall building to its right side.", "type": "camera", "weight": 3 }, { "content": "This wall of the building is painted in white and blue hues.", "type": "attribute", "weight": 1 }, { "content": "Sunlight flickers through the hollowed-out circular platform of the building.", "type": "attribute", "weight": 2 }, { "content": "There is a spire building in the back right of the scene.", "type": "attribute", "weight": 1 }, { "content": "The ground below the building is lined with white arcuate brackets.", "type": "attribute", "weight": 1 }, { "content": "Each bracket is adorned with numerous small arcuate branches.", "type": "attribute", "weight": 1 } ] }, { "event": "Continuing to pan right, the upper half of a large crane enters from the right side of the screen. Suspended from the top of the crane is a red hook, and its boom stretches high into the sky. The cityscape in the background is particularly beautiful under the setting sun, with distant mountain ranges visible in the distance. 
The scene halts as the camera reaches the right-rear corner of the tall building.", "visual_elements": [ { "content": "The camera continues to pan to the right.", "type": "camera", "weight": 3 }, { "content": "The upper part of a large crane enters the scene from the right.", "type": "attribute", "weight": 3 }, { "content": "At the top of the crane hangs a red hook.", "type": "attribute", "weight": 2 }, { "content": "The crane's boom stretches high into the sky.", "type": "attribute", "weight": 2 }, { "content": "The sunset bathes the cityscape in its glow.", "type": "scene", "weight": 1 }, { "content": "In the distance, there are also continuous mountain ranges.", "type": "scene", "weight": 1 }, { "content": "The scene pauses as the camera reaches the building's right rear corner.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0906", "video_path": "MiraData/video_clips/000005061/000005061304.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 22.76, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the base of a Gothic building. The stone walls of the building are white, with multiple arched windows on them. The windows have intricate patterns. In front of the building is a green lawn. At the base of the building are some small doors, equipped with black latticed iron gates.\nAs the camera rotates to the left, it also lifts upwards. The top of the building gradually enters the scene. There are some connecting corridors and arches between different parts of the building. The top has some spires and decorative railings. The camera finally stops at the top of a tower, against a background of light blue sky.", "events": [ { "event": "At the beginning of the video, the camera focuses on the base of a Gothic building. The stone walls of the building are white, with multiple arched windows on them. The windows have intricate patterns. In front of the building is a green lawn. At the base of the building are some small doors, equipped with black latticed iron gates.", "visual_elements": [ { "content": "The camera focuses on the base of a Gothic structure.", "type": "camera", "weight": 3 }, { "content": "The stone walls of the building are white.", "type": "attribute", "weight": 2 }, { "content": "There are multiple arched windows on the walls.", "type": "attribute", "weight": 2 }, { "content": "Intricate patterns are etched on the windows.", "type": "attribute", "weight": 1 }, { "content": "In front of the building lies a stretch of green grass.", "type": "attribute", "weight": 2 }, { "content": "At the base of the building, there are several small doors.", "type": "attribute", "weight": 2 }, { "content": "The small doors are equipped with black lattice iron gates.", "type": "attribute", "weight": 1 } ] }, { "event": "As the camera rotates to the left, it also lifts upwards. The top of the building gradually enters the scene. There are some connecting corridors and arches between different parts of the building. The top has some spires and decorative railings. 
The camera finally stops at the top of a tower, against a background of light blue sky.", "visual_elements": [ { "content": "As the camera rotates to the left, it also tilts upwards.", "type": "camera", "weight": 3 }, { "content": "The top of the building gradually comes into view.", "type": "attribute", "weight": 2 }, { "content": "There are connecting corridors and arches between different parts of the building.", "type": "attribute", "weight": 2 }, { "content": "The top features several spires and decorative railings.", "type": "attribute", "weight": 2 }, { "content": "The camera finally stops at the top of the tall tower.", "type": "camera", "weight": 3 }, { "content": "The background is a light blue sky.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0907", "video_path": "MiraData/video_clips/000005061/000005061327.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 29.36, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera overlooking the entire city from above, focusing on a tall Gothic building positioned centrally in the scene. The building exudes the texture of gray stone. Surrounding it are dense clusters of urban structures, with a Y-shaped road to its right where vehicles are traveling.\nThe camera pans to the left, revealing blue-roofed houses in front of the Gothic building, and adjacent houses topped with black sunshades. The majority of the buildings in the background are low-rise, primarily painted in red and white hues. Green vegetation dots the area between buildings.\nAs the camera continues to pan left, it shifts from the front of the Gothic building to its left side, revealing an even broader urban landscape. In the distance, undulating hills are visible, with the central slopes covered in green grass. The sky is filled with numerous clouds.\nThe camera halts at a high vantage point, offering an aerial view of the entire city. The buildings are densely packed, and streets crisscross each other, with distant hills and the sky forming the backdrop.", "events": [ { "event": "The video begins with the camera overlooking the entire city from above, focusing on a tall Gothic building positioned centrally in the scene. The building exudes the texture of gray stone. Surrounding it are dense clusters of urban structures, with a Y-shaped road to its right where vehicles are traveling.", "visual_elements": [ { "content": "The camera zooms out to an aerial view of the entire city.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on a tall Gothic building.", "type": "camera", "weight": 3 }, { "content": "This building is located in the center of the scene.", "type": "attribute", "weight": 2 }, { "content": "The building exudes a sense of gray stone texture.", "type": "attribute", "weight": 2 }, { "content": "Surrounding the building are dense clusters of urban buildings.", "type": "scene", "weight": 2 }, { "content": "The road to the right of the building forms a Y-shape.", "type": "attribute", "weight": 2 }, { "content": "Vehicles are traveling along the streets.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans to the left, revealing blue-roofed houses in front of the Gothic building, and adjacent houses topped with black sunshades. The majority of the buildings in the background are low-rise, primarily painted in red and white hues. 
Green vegetation dots the area between buildings.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The rooftops of the houses in front of the Gothic building are blue.", "type": "attribute", "weight": 1 }, { "content": "The adjacent rooftops are covered with black sunshades.", "type": "attribute", "weight": 1 }, { "content": "The buildings in the background are mostly low-rise.", "type": "attribute", "weight": 2 }, { "content": "The walls are mainly painted in red and white.", "type": "attribute", "weight": 2 }, { "content": "Green vegetation dots the area between the buildings.", "type": "scene", "weight": 2 } ] }, { "event": "As the camera continues to pan left, it shifts from the front of the Gothic building to its left side, revealing an even broader urban landscape. In the distance, undulating hills are visible, with the central slopes covered in green grass. The sky is filled with numerous clouds.", "visual_elements": [ { "content": "The camera continues to pan to the left.", "type": "camera", "weight": 3 }, { "content": "The view shifts from the front of the Gothic building to its left side.", "type": "camera", "weight": 3 }, { "content": "A broader cluster of urban buildings appears in the scene.", "type": "attribute", "weight": 1 }, { "content": "Distant rolling hills are visible in the background.", "type": "scene", "weight": 2 }, { "content": "The hill in the center is covered with green grass.", "type": "scene", "weight": 1 }, { "content": "The sky is relatively cloudy.", "type": "scene", "weight": 1 } ] }, { "event": "The camera halts at a high vantage point, offering an aerial view of the entire city. The buildings are densely packed, and streets crisscross each other, with distant hills and the sky forming the backdrop.", "visual_elements": [ { "content": "The camera settles at a high vantage point.", "type": "camera", "weight": 3 }, { "content": "The buildings in the city are densely packed.", "type": "attribute", "weight": 2 }, { "content": "The streets criss-cross each other.", "type": "attribute", "weight": 1 }, { "content": "In the distance, hills and the sky form the backdrop of the scene.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 23 }, { "index": "TUNA_0908", "video_path": "MiraData/video_clips/000005061/000005061339.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 22.48, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the top of a Gothic-style building, which exhibits a reddish-brown stone structure and an overall symmetrical appearance. Multiple spires and exquisite carved decorations adorn the rooftop. A few white clouds float in the blue sky.\nThe camera pans upwards, revealing the octagonal attic of the building in the scene. Each intersection of the octagon is adorned with a spire. The rooftop is filled with shelves, cabinets, and antennas. In the background, a wide river crosses the video, with dense urban buildings lining both sides of the riverbank.\nThe camera continues to ascend, with the octagonal attic of the Gothic-style building remaining the centerpiece of the scene. On both sides below the building are parking lots filled with vehicles. The urban buildings in the background become even more visible, interspersed with green lawns and landscaped areas. 
Traffic flows continuously on the city's roads.", "events": [ { "event": "At the beginning of the video, the camera focuses on the top of a Gothic-style building, which exhibits a reddish-brown stone structure and an overall symmetrical appearance. Multiple spires and exquisite carved decorations adorn the rooftop. A few white clouds float in the blue sky.", "visual_elements": [ { "content": "At the beginning of the video, the camera focuses on the top of a Gothic-style building.", "type": "camera", "weight": 3 }, { "content": "The building exhibits a reddish-brown stone structure.", "type": "attribute", "weight": 2 }, { "content": "The building has an overall symmetrical appearance.", "type": "attribute", "weight": 2 }, { "content": "Multiple spires and exquisite carved decorations adorn the rooftop.", "type": "scene", "weight": 2 }, { "content": "A few white clouds float in the blue sky.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans upwards, revealing the octagonal attic of the building in the scene. Each intersection of the octagon is adorned with a spire. The rooftop is filled with shelves, cabinets, and antennas. In the background, a wide river crosses the video, with dense urban buildings lining both sides of the riverbank.", "visual_elements": [ { "content": "The camera pans upwards.", "type": "camera", "weight": 3 }, { "content": "The octagonal attic of the building appears in the scene.", "type": "scene", "weight": 3 }, { "content": "Each intersection of the octagon is adorned with a spire.", "type": "scene", "weight": 2 }, { "content": "The rooftop is filled with shelves, cabinets, and antennas.", "type": "scene", "weight": 2 }, { "content": "In the background, a wide river crosses the video.", "type": "scene", "weight": 2 }, { "content": "Dense urban buildings line both sides of the riverbank.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to ascend, with the octagonal attic of the Gothic-style building remaining the centerpiece of the scene. On both sides below the building are parking lots filled with vehicles. The urban buildings in the background become even more visible, interspersed with green lawns and landscaped areas. Traffic flows continuously on the city's roads.", "visual_elements": [ { "content": "The camera continues to ascend.", "type": "camera", "weight": 3 }, { "content": "The octagonal attic of the Gothic-style building remains the centerpiece of the scene.", "type": "scene", "weight": 2 }, { "content": "On both sides below the building are parking lots filled with vehicles.", "type": "scene", "weight": 2 }, { "content": "The urban buildings in the background become even more visible.", "type": "scene", "weight": 1 }, { "content": "Green lawns and landscaped areas are interspersed among the buildings.", "type": "scene", "weight": 2 }, { "content": "Traffic flows continuously on the city's roads.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 17 }, { "index": "TUNA_0909", "video_path": "MiraData/video_clips/000005061/000005061422.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 20.36, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a green meadow with a path in the foreground and a wooden bench on the path. The bench is dark brown in color and has some wear on the surface. There are black metal brackets on both sides of the bench. 
There are several bare trees in the background, and the shadows of the trees are cast on the grass in the sun. In the distance is a row of white buildings with neat windows.\nThe camera slowly moves to the right, and the backs of a man and a woman sitting on the grass appear on the right side of the screen. The man is wearing a dark coat, and the woman is wearing a light coat with black spots on it. There is a bag and some items beside them. In the background, there are several people on the grass in the distance. Some of them are playing ball, and some are walking. There is also a person sitting on a bench in the distance, with his back to the camera.", "events": [ { "event": "The video begins with the camera focused on a green meadow with a path in the foreground and a wooden bench on the path. The bench is dark brown in color and has some wear on the surface. There are black metal brackets on both sides of the bench. There are several bare trees in the background, and the shadows of the trees are cast on the grass in the sun. In the distance is a row of white buildings with neat windows.", "visual_elements": [ { "content": "The camera is focused on a green meadow.", "type": "camera", "weight": 3 }, { "content": "There is a path in the foreground.", "type": "attribute", "weight": 1 }, { "content": "There is a wooden bench on the path.", "type": "attribute", "weight": 3 }, { "content": "The bench is dark brown and has some wear on the surface.", "type": "attribute", "weight": 2 }, { "content": "There are black metal brackets on both sides of the bench.", "type": "attribute", "weight": 1 }, { "content": "There are several bare trees in the background.", "type": "scene", "weight": 3 }, { "content": "The shadows of the trees are cast on the grass in the sun.", "type": "scene", "weight": 1 }, { "content": "In the distance is a row of white buildings with neat windows.", "type": "scene", "weight": 2 } ] }, { "event": "The camera slowly moves to the right, and the backs of a man and a woman sitting on the grass appear on the right side of the screen. The man is wearing a dark coat, and the woman is wearing a light coat with black spots on it. There is a bag and some items beside them. In the background, there are several people on the grass in the distance. Some of them are playing ball, and some are walking. 
There is also a person sitting on a bench in the distance, with his back to the camera.", "visual_elements": [ { "content": "The camera moves slowly to the right.", "type": "camera", "weight": 3 }, { "content": "The backs of a man and a woman sitting on the grass appear on the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The man is wearing a dark coat and the woman is wearing a light coat with black spots.", "type": "attribute", "weight": 2 }, { "content": "There is a bag and some items beside them.", "type": "attribute", "weight": 1 }, { "content": "In the background, there are several people on the grass in the distance.", "type": "scene", "weight": 2 }, { "content": "Some of the people in the distance are playing ball, and some are taking a walk.", "type": "scene", "weight": 1 }, { "content": "There is a person sitting on a bench in the distance with his back to the camera", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0910", "video_path": "MiraData/video_clips/000005061/000005061577.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 24.36, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera moving forward to frame a walkway along a river with a few benches and a brick red building in the background. The camera slowly moves upward, gradually bringing the upper portion of the building into the frame, with the building's windows neatly organized.\nAs the camera continues to move upward, more sky appears in the frame and the top of the building enters the frame.", "events": [ { "event": "The video begins with the camera moving forward to frame a walkway along a river with a few benches and a brick red building in the background. The camera slowly moves upward, gradually bringing the upper portion of the building into the frame, with the building's windows neatly organized.", "visual_elements": [ { "content": "The camera moves forward.", "type": "camera", "weight": 3 }, { "content": "The scene is of a walkway along a river.", "type": "scene", "weight": 3 }, { "content": "There are several benches along the trail.", "type": "attribute", "weight": 2 }, { "content": "A brick red building in the background.", "type": "attribute", "weight": 2 }, { "content": "The camera moves slowly upward.", "type": "camera", "weight": 3 }, { "content": "Gradually the upper part of the building is brought into the frame", "type": "scene", "weight": 3 }, { "content": "The windows of the building are neatly organized.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera continues to move upward, more sky appears in the frame and the top of the building enters the frame.", "visual_elements": [ { "content": "As the camera continues to move upward.", "type": "camera", "weight": 3 }, { "content": "More of the sky appears in the frame.", "type": "scene", "weight": 2 }, { "content": "The top of the building comes into frame.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0911", "video_path": "MiraData/video_clips/000005061/000005061624.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 21.86, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera  focuses on a distinctive cluster of buildings, with a backdrop of blue sky and white clouds. 
Multiple high-rise buildings are featured in the scene, some of which have spires and domes atop their roofs. At the bottom of the scene, a river flows, with green trees and grass lining its banks.\nThe camera rotates to the left, revealing a yellow tower crane standing tall among the buildings. To the left-rear of the crane is a towering black building, its windows arranged neatly in rows. At the bottom of the scene, the green river surface persists, and an area along the shore is enclosed by fencing. Pedestrians and tourists are walking around in this enclosed area.\nThe camera continues to pan to the left, and the tower crane and the black building move out of the scene. Several white boats are now visible gliding on the river, leaving ripples on the water surface. Many white boats are moored close to the shore near the upper left corner of the river.\nThe camera continues to pan to the left and focuses on an expansive stretch of water. A covered bridge spans the water, and tall buildings rise up behind the bridge.", "events": [ { "event": "At the beginning of the video, the camera focuses on a distinctive cluster of buildings, with a backdrop of blue sky and white clouds. Multiple high-rise buildings are featured in the scene, some of which have spires and domes atop their roofs. At the bottom of the scene, a river flows, with green trees and grass lining its banks.", "visual_elements": [ { "content": "The camera focuses on a distinctive cluster of buildings.", "type": "camera", "weight": 3 }, { "content": "The backdrop is a clear blue sky with fluffy white clouds.", "type": "scene", "weight": 1 }, { "content": "Multiple high-rise towers are visible in the scene.", "type": "attribute", "weight": 2 }, { "content": "Some of the building tops feature spires and domes.", "type": "attribute", "weight": 2 }, { "content": "At the bottom of the scene, a river flows gently.", "type": "attribute", "weight": 1 }, { "content": "The riverbanks are adorned with lush green trees and grassy areas.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera rotates to the left, revealing a yellow tower crane standing tall among the buildings. To the left-rear of the crane is a towering black building, its windows arranged neatly in rows. At the bottom of the scene, the green river surface persists, and an area along the shore is enclosed by fencing. 
Pedestrians and tourists are walking around in this enclosed area.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "A yellow tower crane stands prominently among the buildings.", "type": "attribute", "weight": 2 }, { "content": "To the left-rear of the tower crane stands a tall black building.", "type": "attribute", "weight": 2 }, { "content": "The windows of the building are neatly arranged.", "type": "attribute", "weight": 1 }, { "content": "At the bottom of the scene, the green river surface persists.", "type": "attribute", "weight": 1 }, { "content": "An enclosed area along the shore is fenced off.", "type": "attribute", "weight": 1 }, { "content": "Pedestrians and tourists walk around in this enclosed area.", "type": "action", "weight": 2 } ] }, { "event": "The camera continues to pan to the left, and the tower crane and the black building move out of the scene. Several white boats are now visible gliding on the river, leaving ripples on the water surface. Many white boats are moored close to the shore near the upper left corner of the river.", "visual_elements": [ { "content": "The camera continues to pan to the left.", "type": "camera", "weight": 3 }, { "content": "The tower crane and the black building move out of the scene.", "type": "attribute", "weight": 1 }, { "content": "Several white boats are now visible gliding on the river.", "type": "attribute", "weight": 3 }, { "content": "The boats leave ripples on the water surface.", "type": "attribute", "weight": 1 }, { "content": "Many white boats are moored near the upper left corner of the river, close to the shore.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to pan to the left and focuses on an expansive stretch of water. A covered bridge spans the water, and tall buildings rise up behind the bridge.", "visual_elements": [ { "content": "The camera continues to pan to the left.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on an expansive stretch of water.", "type": "camera", "weight": 3 }, { "content": "A covered bridge spans the water.", "type": "attribute", "weight": 2 }, { "content": "Behind the bridge, tall buildings rise up.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 22 }, { "index": "TUNA_0912", "video_path": "MiraData/video_clips/000005061/000005061658.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 20.36, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a section of a Gothic-style building. Stone columns and a shield emblem of the building are visible in the scene. The emblem is silver, adorned with red dots. The backdrop is a blue sky with white clouds.\nThe camera pans to the right, and another shield emblem on a different door enters the scene. This emblem is red, featuring a golden cross, with blue diagonal stripes adorned with golden patterns crossing the emblem.
\nThe camera pans upwards and to the right. The tower of the building appears in the scene, and intricate carvings are visible between the stone columns of the building.\nThe camera continues panning to the right, ultimately focusing on a statue at the top of the building. The statue is embedded within the tower's peak, with its posture and details clearly visible. The backdrop remains a clear blue sky with white clouds, and tree branches enter the scene from the right side.", "events": [ { "event": "At the beginning of the video, the camera focuses on a section of a Gothic-style building. Stone columns and a shield emblem of the building are visible in the scene. The emblem is silver, adorned with red dots. The backdrop is a blue sky with white clouds.", "visual_elements": [ { "content": "The camera focuses on a section of a Gothic-style building.", "type": "camera", "weight": 3 }, { "content": "Stone columns and a shield emblem of the building are visible in the scene.", "type": "attribute", "weight": 3 }, { "content": "The emblem is silver.", "type": "attribute", "weight": 2 }, { "content": "Red dots adorn the emblem.", "type": "attribute", "weight": 2 }, { "content": "The backdrop is a blue sky with white clouds.", "type": "scene", "weight": 1 } ] }, { "event": "The camera pans to the right, and another shield emblem on a different door enters the scene. This emblem is red, featuring a golden cross, with blue diagonal stripes adorned with golden patterns crossing the emblem.", "visual_elements": [ { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "Another shield emblem on a different door enters the scene.", "type": "attribute", "weight": 3 }, { "content": "This emblem is red.", "type": "attribute", "weight": 2 }, { "content": "A golden cross is featured on the emblem.", "type": "attribute", "weight": 2 }, { "content": "Blue diagonal stripes adorned with golden patterns cross the emblem.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans upwards and to the right. The tower of the building appears in the scene, and intricate carvings are visible between the stone columns of the building.", "visual_elements": [ { "content": "The camera pans upwards and to the right.", "type": "camera", "weight": 3 }, { "content": "The tower of the building appears in the scene.", "type": "attribute", "weight": 2 }, { "content": "Intricate carvings are visible between the stone columns of the building.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera continues panning to the right, ultimately focusing on a statue at the top of the building. The statue is embedded within the tower's peak, with its posture and details clearly visible. 
The backdrop remains a clear blue sky with white clouds, and tree branches enter the scene from the right side.", "visual_elements": [ { "content": "The camera continues panning to the right.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on a statue embedded at the top of the building.", "type": "camera", "weight": 3 }, { "content": "The statue is embedded within the tower's peak.", "type": "attribute", "weight": 2 }, { "content": "The statue's posture and details are clearly visible.", "type": "attribute", "weight": 1 }, { "content": "The backdrop remains a clear blue sky with white clouds.", "type": "scene", "weight": 1 }, { "content": "Tree branches enter the scene from the right side.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 19 }, { "index": "TUNA_0913", "video_path": "MiraData/video_clips/000005061/000005061672.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 23.76, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a green meadow with a low-angle perspective, and there are scattered fallen leaves on the ground. In the upper left corner of the scene, a person wearing a yellow jacket appears, sitting beside a large tree with their back to the camera. The camera pans forward slowly, and gradually, another person wearing a black coat emerges in the upper left corner, sitting alongside the first person, chatting and gazing at the distant scenery.\nThe camera pans to the left and forward, revealing an expansive meadow in front of the two people sitting under the tree. Many individuals are engaged in various activities on the meadow, with green trees surrounding its left side. Behind the meadow, there are white modern buildings, connected by a colonnade between two structures. The exterior wall of the building on the left is surrounded by scaffolding. Dense green trees are distributed behind the buildings.\nAs the camera moves right and forward, the crowd on the meadow is seen sitting or standing, dispersed in different locations. Some people are resting on the grass, some are taking walks, and some are playing soccer. In the distance, a forest of tall buildings stands tall.", "events": [ { "event": "At the beginning of the video, the camera focuses on a green meadow with a low-angle perspective, and there are scattered fallen leaves on the ground. In the upper left corner of the scene, a person wearing a yellow jacket appears, sitting beside a large tree with their back to the camera. 
The camera pans forward slowly, and gradually, another person wearing a black coat emerges in the upper left corner, sitting alongside the first person, chatting and gazing at the distant scenery.", "visual_elements": [ { "content": "The camera focuses on a green meadow with a low-angle perspective.", "type": "camera", "weight": 3 }, { "content": "There are scattered fallen leaves on the ground.", "type": "attribute", "weight": 1 }, { "content": "A person wearing a yellow jacket appears in the upper left corner of the scene.", "type": "attribute", "weight": 2 }, { "content": "This person is sitting beside a large tree with their back to the camera.", "type": "attribute", "weight": 2 }, { "content": "The camera pans forward slowly.", "type": "camera", "weight": 3 }, { "content": "Gradually, another person emerges in the upper left corner of the scene.", "type": "attribute", "weight": 2 }, { "content": "This person wears a black coat.", "type": "attribute", "weight": 2 }, { "content": "The two people are sitting side by side under the tree, chatting and gazing at the distant scenery.", "type": "action", "weight": 2 } ] }, { "event": "The camera pans to the left and forward, revealing an expansive meadow in front of the two people sitting under the tree. Many individuals are engaged in various activities on the meadow, with green trees surrounding its left side. Behind the meadow, there are white modern buildings, connected by a colonnade between two structures. The exterior wall of the building on the left is surrounded by scaffolding. Dense green trees are distributed behind the buildings.", "visual_elements": [ { "content": "The camera pans to the left and forward.", "type": "camera", "weight": 3 }, { "content": "In front of the two people sitting under the tree, there is an expansive meadow.", "type": "attribute", "weight": 3 }, { "content": "Many individuals are engaged in various activities on the meadow.", "type": "attribute", "weight": 2 }, { "content": "Green trees surround the left side of the meadow.", "type": "attribute", "weight": 2 }, { "content": "Behind the meadow, there are white modern buildings.", "type": "attribute", "weight": 2 }, { "content": "Two buildings are connected by a colonnade.", "type": "attribute", "weight": 2 }, { "content": "The exterior wall of the building on the left is surrounded by scaffolding.", "type": "attribute", "weight": 1 }, { "content": "Dense green trees are distributed behind the buildings.", "type": "scene", "weight": 2 } ] }, { "event": "As the camera moves right and forward, the crowd on the meadow is seen sitting or standing, dispersed in different locations. Some people are resting on the grass, some are taking walks, and some are playing soccer. 
In the distance, a forest of tall buildings stands tall.", "visual_elements": [ { "content": "The camera moves right and forward simultaneously.", "type": "camera", "weight": 3 }, { "content": "The crowd on the meadow is seen sitting or standing, dispersed in different locations.", "type": "attribute", "weight": 3 }, { "content": "Some people are resting on the grass, some are taking walks, and some are playing soccer.", "type": "action", "weight": 3 }, { "content": "In the distance, a forest of tall buildings stands tall.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0914", "video_path": "MiraData/video_clips/000005061/000005061837.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 20.84, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera surveys the entire city from above, focusing on a magnificent Gothic cathedral with a cruciform roof and towering spires. The cathedral is surrounded by lush trees and well-trimmed lawns.\nThe camera rotates to the left, shifting the view from the front of the cathedral to its side, revealing intricate carvings and beautiful windows. Behind the cathedral, the urban landscape is dotted with buildings of varying heights, and a river winds its way through the city. Several boats are docked on the river, and the water surface shimmers in the sunlight. In the distance, the hillsides are covered with greenery.\nThe camera continues to rotate to the left, shifting the view from the side of the cathedral to its rear. The winding river exits the scene, and vehicles travel orderly on the city streets. Opposite the cathedral, there is a parking lot, and the buildings in the background stretch as far as the eye can see. In the distance, a prominent white tower stands out, with a black top.", "events": [ { "event": "At the beginning of the video, the camera surveys the entire city from above, focusing on a magnificent Gothic cathedral with a cruciform roof and towering spires. The cathedral is surrounded by lush trees and well-trimmed lawns.", "visual_elements": [ { "content": "The camera surveys the entire city from above.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on a magnificent Gothic cathedral.", "type": "camera", "weight": 3 }, { "content": "The cathedral's roof is cruciform.", "type": "attribute", "weight": 2 }, { "content": "The cathedral's spires rise high into the sky.", "type": "attribute", "weight": 2 }, { "content": "The cathedral is surrounded by lush trees.", "type": "attribute", "weight": 2 }, { "content": "The lawns are neatly trimmed.", "type": "scene", "weight": 1 } ] }, { "event": "The camera rotates to the left, shifting the view from the front of the cathedral to its side, revealing intricate carvings and beautiful windows. Behind the cathedral, the urban landscape is dotted with buildings of varying heights, and a river winds its way through the city. Several boats are docked on the river, and the water surface shimmers in the sunlight. 
In the distance, the hillsides are covered with greenery.", "visual_elements": [ { "content": "The camera rotates to the left.", "type": "camera", "weight": 3 }, { "content": "The scene shifts from the front of the cathedral to its side.", "type": "camera", "weight": 3 }, { "content": "The cathedral features intricate carvings and beautiful windows.", "type": "attribute", "weight": 1 }, { "content": "Behind the cathedral, the urban landscape is dotted with buildings of varying heights.", "type": "attribute", "weight": 1 }, { "content": "A river winds its way through the city.", "type": "attribute", "weight": 2 }, { "content": "Several boats are docked on the river.", "type": "attribute", "weight": 2 }, { "content": "The water surface shimmers in the sunlight.", "type": "attribute", "weight": 1 }, { "content": "In the distance, the hillsides are covered with greenery.", "type": "scene", "weight": 1 } ] }, { "event": "The camera continues to rotate to the left, shifting the view from the side of the cathedral to its rear. The winding river exits the scene, and vehicles travel orderly on the city streets. Opposite the cathedral, there is a parking lot, and the buildings in the background stretch as far as the eye can see. In the distance, a prominent white tower stands out, with a black top.", "visual_elements": [ { "content": "The camera continues to rotate to the left.", "type": "camera", "weight": 3 }, { "content": "The scene shifts from the side of the cathedral to its rear.", "type": "camera", "weight": 3 }, { "content": "The winding river exits the scene.", "type": "attribute", "weight": 2 }, { "content": "Vehicles travel orderly on the city streets.", "type": "attribute", "weight": 1 }, { "content": "Opposite the cathedral, there is a parking lot.", "type": "attribute", "weight": 1 }, { "content": "The buildings in the background stretch as far as the eye can see.", "type": "scene", "weight": 2 }, { "content": "In the distance, a prominent white tower stands out.", "type": "scene", "weight": 2 }, { "content": "The top of the white tower is black.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0915", "video_path": "MiraData/video_clips/000005061/000005061856.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Autos & Vehicles", "duration": 20.8, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on the ground, showing an asphalt road surface covered with moss and fallen leaves. Then, the camera is lifted up, and a yellow cable comes into view in the frame.\nNext, the camera moves upward. It can be seen that one end of the cable extends to a telegraph pole on the roadside, and the other end of the cable is connected to a black car parked on the roadside. The rear left taillight of this car is red, and parts of the car's tires and body come into the picture. Another car can be seen parked in front of the black car in the background.", "events": [ { "event": "At the beginning of the video, the camera is focused on the ground, showing an asphalt road surface covered with moss and fallen leaves. 
Then, the camera is lifted up, and a yellow cable comes into view in the frame.", "visual_elements": [ { "content": "The camera is focused on the ground.", "type": "camera", "weight": 3 }, { "content": "It shows an asphalt road surface covered with moss and fallen leaves.", "type": "scene", "weight": 2 }, { "content": "The camera is lifted up.", "type": "camera", "weight": 3 }, { "content": "A yellow cable comes into view in the frame.", "type": "attribute", "weight": 3 } ] }, { "event": "Next, the camera moves upward. It can be seen that one end of the cable extends to a telegraph pole on the roadside, and the other end of the cable is connected to a black car parked on the roadside. The rear left taillight of this car is red, and parts of the car's tires and body come into the picture. Another car can be seen parked in front of the black car in the background.", "visual_elements": [ { "content": "The camera moves upward.", "type": "camera", "weight": 3 }, { "content": "It can be seen that one end of the cable extends to a telegraph pole on the roadside.", "type": "attribute", "weight": 2 }, { "content": "The other end of the cable is connected to a black car parked on the roadside.", "type": "attribute", "weight": 3 }, { "content": "The rear left taillight of this car is red.", "type": "attribute", "weight": 2 }, { "content": "Parts of the car's tires and body come into the picture.", "type": "attribute", "weight": 2 }, { "content": "Another car can be seen parked in front of the black car in the background.", "type": "scene", "weight": 1 } ] } ], "n_events": 2, "n_elements": 10 }, { "index": "TUNA_0916", "video_path": "MiraData/video_clips/000005061/000005061925.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 27.84, "resolution": { "width": 1280, "height": 720 }, "caption": "At the start of the video, the camera focuses on a pink building with a white door marked with the number \"18\" on its front. There are two windows next to the door. The streets surrounding the building are paved with stone slabs, and yellow double lines mark the roadside.\nThe camera slowly moves to the right as a white delivery truck enters the scene from the left. The passenger side door is marked with the words \"Service Partner of DHL,\" and the truck's body and rear door feature prominent DHL logos. The driver is a black male wearing glasses and a hat, dressed in a red and yellow work vest.\nThe truck makes a left turn, and the camera follows it slowly to the right and forward. The truck continues to travel along the road, with yellow double lines marking both sides. The buildings on both sides of the road have an antique and elegant appearance. In front of the buildings on the left side, there is a green shrubbery.\nThe camera stops panning, while the truck continues to move forward. At the end of the path, a building undergoing renovation is covered with white plastic sheeting. An orange double-decker bus passes in front of the building.", "events": [ { "event": "At the start of the video, the camera focuses on a pink building with a white door marked with the number \"18\" on its front. There are two windows next to the door. 
The streets surrounding the building are paved with stone slabs, and yellow double lines mark the roadside.", "visual_elements": [ { "content": "The camera focuses on a pink building.", "type": "camera", "weight": 3 }, { "content": "The front of the building has a white door.", "type": "attribute", "weight": 2 }, { "content": "The door is marked with the number \"18\".", "type": "attribute", "weight": 2 }, { "content": "There are two windows next to the door.", "type": "attribute", "weight": 2 }, { "content": "The streets surrounding the building are paved with stone slabs.", "type": "attribute", "weight": 1 }, { "content": "Yellow double lines mark the roadside.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera slowly moves to the right as a white delivery truck enters the scene from the left. The passenger side door is marked with the words \"Service Partner of DHL,\" and the truck's body and rear door feature prominent DHL logos. The driver is a black male wearing glasses and a hat, dressed in a red and yellow work vest.", "visual_elements": [ { "content": "The camera slowly moves to the right.", "type": "camera", "weight": 3 }, { "content": "A white delivery truck enters the scene from the left.", "type": "action", "weight": 3 }, { "content": "The passenger side door is marked with the words \"Service Partner of DHL\".", "type": "attribute", "weight": 2 }, { "content": "The truck's body and rear door feature prominent DHL logos.", "type": "attribute", "weight": 2 }, { "content": "The driver is a black male wearing glasses and a hat.", "type": "attribute", "weight": 2 }, { "content": "The driver is dressed in a red and yellow work vest.", "type": "attribute", "weight": 1 } ] }, { "event": "The truck makes a left turn, and the camera follows it slowly to the right and forward. The truck continues to travel along the road, with yellow double lines marking both sides. The buildings on both sides of the road have an antique and elegant appearance. In front of the buildings on the left side, there is a green shrubbery.", "visual_elements": [ { "content": "The truck makes a left turn.", "type": "action", "weight": 3 }, { "content": "The camera follows the truck as it slowly moves to the right and forward.", "type": "camera", "weight": 3 }, { "content": "The truck continues to travel along the road.", "type": "action", "weight": 2 }, { "content": "Yellow double lines mark both sides of the road.", "type": "attribute", "weight": 2 }, { "content": "The buildings on both sides of the road have an antique and elegant appearance.", "type": "attribute", "weight": 2 }, { "content": "In front of the buildings on the left side, there is a green shrubbery.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera stops panning, while the truck continues to move forward. At the end of the path, a building undergoing renovation is covered with white plastic sheeting. 
An orange double-decker bus passes in front of the building.", "visual_elements": [ { "content": "The camera stops panning.", "type": "camera", "weight": 3 }, { "content": "The truck continues to move forward.", "type": "action", "weight": 2 }, { "content": "At the end of the path, there is a building undergoing renovation.", "type": "attribute", "weight": 2 }, { "content": "The exterior wall is covered with white plastic sheeting.", "type": "attribute", "weight": 2 }, { "content": "An orange double-decker bus passes in front of the building.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 23 }, { "index": "TUNA_0917", "video_path": "MiraData/video_clips/000005061/000005061970.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 24.84, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera overlooks a vast water area with rippling waves on the surface. In the upper left corner of the scene, there is a red buoy gently swaying on the water. As the camera moves forward slowly, the buoy disappears from the scene, leaving the water surface to occupy the entire view.\nAs the camera pans forward, it simultaneously tilts upwards, gradually bringing the buildings along the waterfront into view. On the right side of the scene, there stands a row of prominent red-brick buildings, while among the other buildings, there are some modern high-rise towers as well as some with historical architectural styles.\nThe camera continues to pan forward, revealing a dock on the left side of the scene, with several ships moored there. In the background, some cranes and other port facilities can be seen. The sky is overcast.", "events": [ { "event": "At the beginning of the video, the camera overlooks a vast water area with rippling waves on the surface. In the upper left corner of the scene, there is a red buoy gently swaying on the water. As the camera moves forward slowly, the buoy disappears from the scene, leaving the water surface to occupy the entire view.", "visual_elements": [ { "content": "The camera overlooks a vast water area.", "type": "camera", "weight": 3 }, { "content": "Waves ripple gently on the water surface.", "type": "attribute", "weight": 2 }, { "content": "In the upper left corner of the scene, there is a red buoy.", "type": "attribute", "weight": 2 }, { "content": "The buoy slightly sways on the water.", "type": "attribute", "weight": 2 }, { "content": "The camera moves forward slowly.", "type": "camera", "weight": 3 }, { "content": "The buoy disappears from the scene.", "type": "attribute", "weight": 1 }, { "content": "The water surface fills the entire scene.", "type": "attribute", "weight": 3 } ] }, { "event": "As the camera pans forward, it simultaneously tilts upwards, gradually bringing the buildings along the waterfront into view. 
On the right side of the scene, there stands a row of prominent red-brick buildings, while among the other buildings, there are some modern high-rise towers as well as some with historical architectural styles.", "visual_elements": [ { "content": "As the camera pans forward, it simultaneously tilts upwards.", "type": "camera", "weight": 3 }, { "content": "The buildings along the waterfront gradually come into view.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the scene, there stands a row of prominent red-brick buildings.", "type": "attribute", "weight": 2 }, { "content": "Among the other buildings, there are some modern high-rise towers as well as some with historical architectural styles.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to pan forward, revealing a dock on the left side of the scene, with several ships moored there. In the background, some cranes and other port facilities can be seen. The sky is overcast.", "visual_elements": [ { "content": "The camera continues to pan forward.", "type": "camera", "weight": 3 }, { "content": "On the left side of the scene, there is a dock.", "type": "attribute", "weight": 2 }, { "content": "There are several ships docked at the pier.", "type": "attribute", "weight": 2 }, { "content": "In the background, some cranes and other port facilities can be seen.", "type": "scene", "weight": 2 }, { "content": "The sky is overcast.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0918", "video_path": "MiraData/video_clips/000005062/000005062299.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 21.24, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera pans down from a high altitude over a large green lawn. On the lawn, there is a white rectangular covering, with one end secured to the ground by a cylindrical object. A portion of the earth is exposed on the grass at the other end of the covering. The lawn has distinct striped patterns, with varying shades of green in adjacent stripes.\nThe camera continues to pan downwards, gradually revealing the edges of the lawn and the surrounding environment. A green track can be seen circling the lawn, and outside the track are bleachers. The neatly arranged white seats in the bleachers are clearly visible. There is a pathway beneath the bleachers, and to the right of the pathway, a section of the bleachers is covered with black cloth.\nAs the camera continues panning down, the gray rooftops surrounding the stadium appear in the scene. Near the outer wall stands a tall white streetlamp. The cement road outside the stadium is clean and tidy, with a white solid line painted in the middle. On the half of the road farther from the stadium, neatly painted yellow markings are visible.", "events": [ { "event": "At the beginning of the video, the camera pans down from a high altitude over a large green lawn. On the lawn, there is a white rectangular covering, with one end secured to the ground by a cylindrical object. A portion of the earth is exposed on the grass at the other end of the covering. 
The lawn has distinct striped patterns, with varying shades of green in adjacent stripes.", "visual_elements": [ { "content": "The camera overlooks a large green lawn from above.", "type": "camera", "weight": 3 }, { "content": "There is a white rectangular covering on the lawn.", "type": "attribute", "weight": 3 }, { "content": "One end of the covering is secured to the ground by a cylindrical object.", "type": "attribute", "weight": 2 }, { "content": "A portion of the earth is exposed on the grass at the other end of the covering.", "type": "attribute", "weight": 2 }, { "content": "The lawn has distinct striped patterns.", "type": "attribute", "weight": 2 }, { "content": "The green shades of adjacent stripes differ.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to pan downwards, gradually revealing the edges of the lawn and the surrounding environment. A green track can be seen circling the lawn, and outside the track are bleachers. The neatly arranged white seats in the bleachers are clearly visible. There is a pathway beneath the bleachers, and to the right of the pathway, a section of the bleachers is covered with black cloth.", "visual_elements": [ { "content": "The camera continues to pan downwards.", "type": "camera", "weight": 3 }, { "content": "There is a green track circling the lawn.", "type": "attribute", "weight": 1 }, { "content": "Outside the track are bleachers.", "type": "attribute", "weight": 3 }, { "content": "The bleachers are lined with neatly arranged white seats.", "type": "attribute", "weight": 2 }, { "content": "There is a pathway beneath the bleachers.", "type": "attribute", "weight": 2 }, { "content": "A section of the bleachers on the right side of the pathway is covered with black cloth.", "type": "attribute", "weight": 1 } ] }, { "event": "As the camera continues panning down, the gray rooftops surrounding the stadium appear in the scene. Near the outer wall stands a tall white streetlamp. The cement road outside the stadium is clean and tidy, with a white solid line painted in the middle. On the half of the road farther from the stadium, neatly painted yellow markings are visible.", "visual_elements": [ { "content": "The camera continues panning down.", "type": "camera", "weight": 3 }, { "content": "The gray rooftops surrounding the stadium appear in the scene.", "type": "attribute", "weight": 2 }, { "content": "Near the outer wall, there is a tall white streetlamp.", "type": "attribute", "weight": 2 }, { "content": "The cement road outside the stadium is clean and tidy.", "type": "attribute", "weight": 1 }, { "content": "A white solid line is painted in the middle of the road.", "type": "attribute", "weight": 1 }, { "content": "On the half of the road farther from the stadium, neatly painted yellow markings are visible.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0919", "video_path": "MiraData/video_clips/000005062/000005062342.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 21.76, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the modern architecture on the left side of the scene boasts a unique design style, with its exterior wall composed of numerous glass windows and white walls. On the right side of the scene stands a majestic red-brick building, with two large white columns towering on both sides of the stone wall at the entrance. 
The part of the building connected to the bottom of the columns is illuminated by a surrounding light strip. Between the two buildings, there is a lit streetlight, with only the top of the streetlight visible in the scene.\nThe camera pans forward while simultaneously lifting up, revealing the top of the building on the left side in the scene. The rear half of the white building features a curved exterior wall, with numerous neatly arranged parallelogram windows on it. The red-brick part of the building on the right side pans out of the scene, leaving only the two white columns partially visible in the scene.\nThe camera continues to pan forward while descending, revealing a camera on the pole of the streetlight. The first streetlight pans out of the scene, and a row of streetlights comes into view, with black poles. The quaint red-brick building stands opposite the modern white building.", "events": [ { "event": "At the beginning of the video, the modern architecture on the left side of the scene boasts a unique design style, with its exterior wall composed of numerous glass windows and white walls. On the right side of the scene stands a majestic red-brick building, with two large white columns towering on both sides of the stone wall at the entrance. The part of the building connected to the bottom of the columns is illuminated by a surrounding light strip. Between the two buildings, there is a lit streetlight, with only the top of the streetlight visible in the scene.", "visual_elements": [ { "content": "The modern architecture on the left side of the scene features a unique design style.", "type": "attribute", "weight": 3 }, { "content": "The building's exterior facade is composed of numerous glass windows and white walls.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the scene stands a majestic red-brick building.", "type": "attribute", "weight": 3 }, { "content": "Two large white columns stand tall on both sides of the stone wall at the entrance of the building.", "type": "attribute", "weight": 2 }, { "content": "The part of the building connected to the bottom of the columns is illuminated by a surrounding light strip.", "type": "attribute", "weight": 2 }, { "content": "Between the two buildings, there is a lit streetlamp.", "type": "attribute", "weight": 2 }, { "content": "Only the top part of a streetlamp is visible in the scene", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans forward while simultaneously lifting up, revealing the top of the building on the left side in the scene. The rear half of the white building features a curved exterior wall, with numerous neatly arranged parallelogram windows on it. 
The red-brick part of the building on the right side pans out of the scene, leaving only the two white columns partially visible in the scene.", "visual_elements": [ { "content": "The camera pans forward while simultaneously tilting upwards.", "type": "camera", "weight": 3 }, { "content": "The top of the building on the left comes into view.", "type": "attribute", "weight": 2 }, { "content": "The entire exterior wall of the rear half of the white building is curved.", "type": "attribute", "weight": 3 }, { "content": "The wall features many neatly arranged parallelogram windows.", "type": "attribute", "weight": 2 }, { "content": "The red brick section of the building on the right exits the scene,", "type": "attribute", "weight": 2 }, { "content": "leaving only two white pillars visible in the shot.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to pan forward while descending, revealing a camera on the pole of the streetlight. The first streetlight pans out of the scene, and a row of streetlights comes into view, with black poles. The quaint red-brick building stands opposite the modern white building.", "visual_elements": [ { "content": "The camera pans forward while simultaneously descending.", "type": "camera", "weight": 3 }, { "content": "A camera is mounted on the pole of the first streetlamp.", "type": "attribute", "weight": 1 }, { "content": "As the first streetlamp exits the scene, a row of streetlamps comes into view.", "type": "attribute", "weight": 2 }, { "content": "The poles of the streetlamps are black.", "type": "attribute", "weight": 2 }, { "content": "An antique red-brick building stands in contrast to a modern white building.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0920", "video_path": "MiraData/video_clips/000005062/000005062526.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 21.68, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the trunk of a large tree with rough and textured bark. As the camera slowly moves to the right, the trunk gradually moves out from the left side of the frame, revealing a bridge and leaves in the background. As the camera continues to move to the right, the tree trunk moves completely out of frame and the view becomes more open.\nAs the camera continues to move to the right, a suspension bridge appears, with its pylons and steel cables clearly visible. In the background of the bridge is a blue sky and some clouds, and below the bridge are thick trees and green grass. As the camera continues to move to the right, a full view of the bridge gradually emerges, with details of the towers and the structure of the bridge deck visible.", "events": [ { "event": "At the beginning of the video, the camera focuses on the trunk of a large tree with rough and textured bark. As the camera slowly moves to the right, the trunk gradually moves out from the left side of the frame, revealing a bridge and leaves in the background. 
As the camera continues to move to the right, the tree trunk moves completely out of frame and the view becomes more open.", "visual_elements": [ { "content": "The image is of the trunk of a large tree.", "type": "scene", "weight": 3 }, { "content": "The trunk is rough.", "type": "attribute", "weight": 2 }, { "content": "The camera slowly moves to the right.", "type": "camera", "weight": 3 }, { "content": "The trunk gradually moves out from the left side of the frame.", "type": "scene", "weight": 3 }, { "content": "The bridge and foliage in the background are revealed.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera continues to move to the right, a suspension bridge appears, with its pylons and steel cables clearly visible. In the background of the bridge is a blue sky and some clouds, and below the bridge are thick trees and green grass. As the camera continues to move to the right, a full view of the bridge gradually emerges, with details of the towers and the structure of the bridge deck visible.", "visual_elements": [ { "content": "The camera continues to move to the right.", "type": "camera", "weight": 3 }, { "content": "A suspension bridge appears.", "type": "scene", "weight": 3 }, { "content": "The pylons and steel cables of the bridge are clearly visible.", "type": "attribute", "weight": 2 }, { "content": "There are dense trees and green grass below the bridge.", "type": "attribute", "weight": 2 }, { "content": "The camera continues to move to the right.", "type": "camera", "weight": 3 }, { "content": "The full view of the bridge is gradually revealed.", "type": "attribute", "weight": 2 }, { "content": "The details of the pylons and the structure of the bridge deck are visible.", "type": "attribute", "weight": 1 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0921", "video_path": "MiraData/video_clips/000005062/000005062672.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 20.12, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on an ancient stone wall with a rough surface and a color that is a mix of gray and yellow. In the background, there is a curved road, and across the road, the stone wall of a building is covered with moss, with a red gate beside it.\nThe camera pans upward and to the right, gradually revealing the full view of the stone wall on the left side of the street. The top of the stone wall curves inward and is covered with moss. Outside the stone wall, there is a leaning tree with a sturdy trunk and branches stretching outwards. On the right side of the street, there are several low-rise buildings with light yellow walls and dark colored roof tiles. The architectural style is simple and quaint.\nThe camera continues to pan upward and to the right, showing yellow fallen leaves piled up on both sides of the street. At the end of the street, a pedestrian walks along the right side of the road. Subsequently, he reaches out his right hand to a black pipe outside a nearby house. In the background, vehicles drive along the horizontal road.", "events": [ { "event": "At the beginning of the video, the camera focuses on an ancient stone wall with a rough surface and a color that is a mix of gray and yellow. 
In the background, there is a curved road, and across the road, the stone wall of a building is covered with moss, with a red gate beside it.", "visual_elements": [ { "content": "The camera focuses on an ancient stone wall.", "type": "camera", "weight": 3 }, { "content": "The surface of the stone wall is rough.", "type": "attribute", "weight": 2 }, { "content": "Its color is a mix of gray and yellow.", "type": "attribute", "weight": 1 }, { "content": "In the background, there is a curved road.", "type": "scene", "weight": 1 }, { "content": "The stone wall of a building across the road is covered with moss.", "type": "attribute", "weight": 1 }, { "content": "Next to this stone wall, there is a red gate.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans upward and to the right, gradually revealing the full view of the stone wall on the left side of the street. The top of the stone wall curves inward and is covered with moss. Outside the stone wall, there is a leaning tree with a sturdy trunk and branches stretching outwards. On the right side of the street, there are several low-rise buildings with light yellow walls and dark colored roof tiles. The architectural style is simple and quaint.", "visual_elements": [ { "content": "The camera pans upward and to the right.", "type": "camera", "weight": 3 }, { "content": "The full view of the stone wall on the left side of the street gradually comes into view.", "type": "attribute", "weight": 3 }, { "content": "The top of the stone wall curves inward.", "type": "attribute", "weight": 2 }, { "content": "The top of the stone wall is covered with moss.", "type": "attribute", "weight": 2 }, { "content": "Outside the stone wall, there is a leaning tree.", "type": "attribute", "weight": 3 }, { "content": "The tree has a sturdy trunk.", "type": "attribute", "weight": 2 }, { "content": "Its branches stretch outwards.", "type": "attribute", "weight": 1 }, { "content": "On the right side of the street, there are several low-rise buildings.", "type": "attribute", "weight": 2 }, { "content": "The walls of the buildings are light yellow.", "type": "attribute", "weight": 2 }, { "content": "The roofs of the buildings are covered with dark colored tiles.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to pan upward and to the right, showing yellow fallen leaves piled up on both sides of the street. At the end of the street, a pedestrian walks along the right side of the road. Subsequently, he reaches out his right hand to a black pipe outside a nearby house. 
In the background, vehicles drive along the horizontal road.", "visual_elements": [ { "content": "The camera continues to pan uptoward and to the right.", "type": "camera", "weight": 3 }, { "content": "Yellow fallen leaves are piled up on both sides of the street.", "type": "scene", "weight": 1 }, { "content": "At the end of the street, a pedestrian walks along the right side of the road.", "type": "action", "weight": 3 }, { "content": "Subsequently, the pedestrian reaches out his right hand to a black pipe outside a nearby house.", "type": "action", "weight": 2 }, { "content": "In the background, vehicles drive along the horizontal road.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0922", "video_path": "MiraData/video_clips/000005062/000005062692.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 23.64, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a unique set of stairs. The design of the stairs presents a wavy curve, mainly using gray and yellow stone. There are metal handrails on both sides of the stairs. In the background, there is a white modern building on the right side of the picture, and the windows of the building are transparent. There is a tree between the stairs and the building, and the tree is lush and leafy.\nThe camera moves slowly to the right. A man wearing a black jacket and blue jeans enters from the right side of the screen, carrying a black backpack with orange stripes and a gray hat. He walks up the stone steps in white shoes with steady steps. The camera follows behind the man. Occasionally, pedestrians pass by in the background.\nAt the top of the stairs, a white man and a black man are greeting each other. The white man is wearing a brown coat, and the black man is carrying a black backpack and wearing a blue shirt with red letters. Part of the interior furnishings can be seen behind the glass window.\nAt the end of the video, the camera continues to move to the right. The man walking on the stairs and the two men greeting each other gradually disappear to the left side of the screen. A woman wearing a black coat and a black hat appears at the top of the stairs on the right side of the screen. The woman is walking to the left side of the screen. In the background, the top of the stairs is surrounded by a transparent railing with a black handrail.", "events": [ { "event": "The video begins with the camera focusing on a unique set of stairs. The design of the stairs presents a wavy curve, mainly using gray and yellow stone. There are metal handrails on both sides of the stairs. In the background, there is a white modern building on the right side of the picture, and the windows of the building are transparent. 
There is a tree between the stairs and the building, and the tree is lush and leafy.", "visual_elements": [ { "content": "The camera is focused on a set of unique stairs.", "type": "camera", "weight": 3 }, { "content": "The design of the stairs presents a wavy curve.", "type": "attribute", "weight": 3 }, { "content": "The stairs are mainly made of gray and yellow stone.", "type": "attribute", "weight": 2 }, { "content": "There are metal handrails on both sides of the stairs.", "type": "attribute", "weight": 2 }, { "content": "In the background, there is a white modern building on the right side of the screen.", "type": "scene", "weight": 3 }, { "content": "The windows of the building are transparent.", "type": "scene", "weight": 1 }, { "content": "There is a tree between the stairs and the building.", "type": "scene", "weight": 2 }, { "content": "The tree is lush and leafy.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves slowly to the right. A man wearing a black jacket and blue jeans enters from the right side of the screen, carrying a black backpack with orange stripes and a gray hat. He walks up the stone steps in white shoes with steady steps. The camera follows behind the man. Occasionally, pedestrians pass by in the background.", "visual_elements": [ { "content": "The camera moves slowly to the right.", "type": "camera", "weight": 3 }, { "content": "A man in a black jacket and blue jeans enters from the right side of the screen.", "type": "attribute", "weight": 3 }, { "content": "The man wears white shoes, a black backpack with orange stripes, and a gray hat.", "type": "attribute", "weight": 2 }, { "content": "The man walks up the stone steps with a vigorous pace.", "type": "action", "weight": 3 }, { "content": "Occasionally, pedestrians pass by in the background.", "type": "scene", "weight": 1 } ] }, { "event": "At the top of the stairs, a white man and a black man are greeting each other. The white man is wearing a brown coat, and the black man is carrying a black backpack and wearing a blue shirt with red letters. Part of the interior furnishings can be seen behind the glass window.", "visual_elements": [ { "content": "A white man and a black man are greeting each other at the top of the stairs.", "type": "attribute", "weight": 2 }, { "content": "The white man is wearing a brown coat, and the black man is carrying a black backpack and wearing a blue shirt with red letters.", "type": "attribute", "weight": 1 }, { "content": "Some of the furnishings in the room can be seen behind the glass window.", "type": "scene", "weight": 1 } ] }, { "event": "At the end of the video, the camera continues to move to the right. The man walking on the stairs and the two men greeting each other gradually disappear to the left side of the screen. A woman wearing a black coat and a black hat appears at the top of the stairs on the right side of the screen. The woman is walking to the left side of the screen. 
In the background, the top of the stairs is surrounded by a transparent railing with a black handrail.", "visual_elements": [ { "content": "At the end of the video, the camera continues to move to the right.", "type": "camera", "weight": 2 }, { "content": "The man walking on the stairs and the two men greeting each other gradually disappear to the left side of the screen.", "type": "attribute", "weight": 2 }, { "content": "A woman wearing a black coat and a black hat appears at the top right of the screen.", "type": "attribute", "weight": 2 }, { "content": "The woman is walking to the left side of the screen.", "type": "action", "weight": 2 }, { "content": "The top of the stairs is surrounded by a transparent railing with black handrails.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 21 }, { "index": "TUNA_0923", "video_path": "MiraData/video_clips/000005062/000005062893.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 24.88, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on dense, lush green foliage. The leaves of the foliage are abundant and thriving. As the camera slowly pans upwards, the outlines of some buildings become visible through the gaps between the trees, while the treetops sway gently in the wind. The sky in the distance is covered with thick clouds, appearing grayish.\nThe camera continues to pan upwards, revealing that the distant buildings all have white rooftops. A large building nearby enters the scene, showcasing its steel framework at the top. The rooftop of the building is white, and the walls are red. On the exterior wall, the logo of \"Aston Villa Football Club\" can be seen.\nAs the camera keeps panning upwards, an aerial view of a stadium appears. The stadium's stands are mainly red, adorned with white patterns and letter logos. The lawn in the center of the field is lush and green.\nThe camera continues panning upward, capturing a cluster of buildings near the stadium. Surrounding the stadium is a ring of small red houses, while further in the distance are large white bungalows. Green trees are interspersed among these buildings. In the background, vehicles are constantly moving along a road, and the distant hillsides are covered with vegetation.", "events": [ { "event": "At the beginning of the video, the camera focuses on dense, lush green foliage. The leaves of the foliage are abundant and thriving. As the camera slowly pans upwards, the outlines of some buildings become visible through the gaps between the trees, while the treetops sway gently in the wind. 
The sky in the distance is covered with thick clouds, appearing grayish.", "visual_elements": [ { "content": "The camera focuses on a dense, lush green foliage.", "type": "camera", "weight": 3 }, { "content": "The leaves of the foliage are abundant and thriving.", "type": "attribute", "weight": 2 }, { "content": "The camera slowly pans upwards.", "type": "camera", "weight": 3 }, { "content": "Through the gaps between the trees, the outlines of some buildings become visible.", "type": "scene", "weight": 1 }, { "content": "The treetops sway gently in the wind.", "type": "attribute", "weight": 2 }, { "content": "The sky in the distance is covered with thick clouds", "type": "scene", "weight": 1 }, { "content": "The clouds appear grayish.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera continues to pan upwards, revealing that the distant buildings all have white rooftops. A large building nearby enters the scene, showcasing its steel scenework at the top. The rooftop of the building is white, and the walls are red. On the exterior wall, the logo of \"Aston Villa Football Club\" can be seen.", "visual_elements": [ { "content": "The camera continues to pan upwards.", "type": "camera", "weight": 3 }, { "content": "The distant buildings all have white rooftops.", "type": "scene", "weight": 2 }, { "content": "A large building nearby enters the scene, showcasing its steel scenework at the top.", "type": "attribute", "weight": 2 }, { "content": "The roof of the building is white.", "type": "attribute", "weight": 1 }, { "content": "The walls of the building are red.", "type": "attribute", "weight": 2 }, { "content": "On the exterior wall, the logo of \"Aston Villa Football Club\" can be seen.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera keeps panning upwards, an aerial view of a stadium appears. The stadium's stands are mainly red, adorned with white patterns and letter logos. The lawn in the center of the field is lush and green.", "visual_elements": [ { "content": "The camera keeps panning upwards.", "type": "camera", "weight": 3 }, { "content": "An aerial view of a stadium appears.", "type": "attribute", "weight": 3 }, { "content": "The stadium's stands are mainly red", "type": "attribute", "weight": 2 }, { "content": "There are white patterns and letter logos on top of the stands.", "type": "attribute", "weight": 2 }, { "content": "The lawn in the center of the field is lush and green.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues panning upward, capturing a cluster of buildings near the stadium. Surrounding the stadium are a ring of small red houses, while further in the distance are large white bungalows. Green trees are interspersed among these buildings. 
In the background, vehicles are constantly moving along a road, and the distant hillsides are covered with vegetation.", "visual_elements": [ { "content": "The camera continues panning upward.", "type": "camera", "weight": 3 }, { "content": "A cluster of buildings near the stadium enters the scene.", "type": "attribute", "weight": 1 }, { "content": "Surrounding the stadium is a ring of small red houses.", "type": "attribute", "weight": 1 }, { "content": "Further away from the stadium are large white bungalows.", "type": "scene", "weight": 2 }, { "content": "Green trees are interspersed among these buildings.", "type": "scene", "weight": 1 }, { "content": "In the background, vehicles are constantly moving along a road.", "type": "scene", "weight": 1 }, { "content": "The distant hillsides are covered with vegetation.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0924", "video_path": "MiraData/video_clips/000005063/000005063016.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 20.16, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera blurred and the frame is a dark and blurry streak.\nThe camera proceeds to move to the right and a black plastic basket is placed on the dark ground, which is filled with green onions. To the right of the basket is a black sign with white writing on it.", "events": [ { "event": "The video begins with the camera blurred and the frame is a dark and blurry streak.", "visual_elements": [ { "content": "The camera is blurred", "type": "camera", "weight": 3 }, { "content": "The frame is a dark and blurry streak.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera proceeds to move to the right and a black plastic basket is placed on the dark ground, which is filled with green onions. To the right of the basket is a black sign with white writing on it.", "visual_elements": [ { "content": "The camera proceeds to move to the right.", "type": "camera", "weight": 3 }, { "content": "A black plastic basket is placed on the dark ground.", "type": "scene", "weight": 3 }, { "content": "A black plastic basket is filled with green onions.", "type": "attribute", "weight": 3 }, { "content": "To the right of the basket is a black sign.", "type": "attribute", "weight": 2 }, { "content": "The sign says “LEEKS £4/KG” in white letters.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 7 }, { "index": "TUNA_0925", "video_path": "MiraData/video_clips/000005063/000005063273.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 20.96, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera faces the sky, revealing a vast blue sky adorned with white clouds. The top of a building with earthy yellow brick walls is visible at the left edge of the scene, encircled by a white fence. In the upper right corner, the top part of a streetlamp is visible.\nThe camera begins to pan downwards, revealing the wall of the brick building on the right. Some dark shadows of birds fly across the wall. At the base of the building, a circle of blue billboards surrounds it. Behind the brick building stands a white structure with the logo \"CHELSEA FC\" displayed prominently. 
A row of red houses appears on the left side of the scene.\nThe camera continues to pan downwards, showing several blue human-shaped stands in front of the white building. The stands have white numbers and names on them. Billboards line both sides of the road, some featuring players wearing jerseys. People are walking in front of the white building and on the left side of the road. At the end of the road, there are some withered trees.", "events": [ { "event": "At the beginning of the video, the camera faces the sky, revealing a vast blue sky adorned with white clouds. The top of a building with earthy yellow brick walls is visible at the left edge of the scene, encircled by a white fence. In the upper right corner, the top part of a streetlamp is visible.", "visual_elements": [ { "content": "The camera faces the sky.", "type": "camera", "weight": 3 }, { "content": "Blue skies and white clouds come into view.", "type": "attribute", "weight": 3 }, { "content": "At the left edge of the scene is the top of a building with earthy yellow brick walls.", "type": "attribute", "weight": 2 }, { "content": "A white fence surrounds the top of the building.", "type": "attribute", "weight": 1 }, { "content": "In the upper right corner of the scene is the top part of a streetlamp.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera begins to pan downwards, revealing the wall of the brick building on the right. Some dark shadows of birds fly across the wall. At the base of the building, a circle of blue billboards surrounds it. Behind the brick building stands a white structure with the logo \"CHELSEA FC\" displayed prominently. A row of red houses appears on the left side of the scene.", "visual_elements": [ { "content": "The camera begins to pan downwards.", "type": "camera", "weight": 3 }, { "content": "The wall of the brick building on the right is revealed.", "type": "attribute", "weight": 2 }, { "content": "Some dark shadows of birds fly across the wall.", "type": "attribute", "weight": 2 }, { "content": "The base of the building is surrounded by a circle of blue billboards.", "type": "attribute", "weight": 2 }, { "content": "Behind the brick building stands a white structure.", "type": "attribute", "weight": 2 }, { "content": "The structure has the logo \"CHELSEA FC\" on it.", "type": "attribute", "weight": 2 }, { "content": "A row of red houses appears on the left side of the scene.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to pan downwards, showing several blue human-shaped stands in front of the white building. The stands have white numbers and names on them. Billboards line both sides of the road, some featuring players wearing jerseys. People are walking in front of the white building and on the left side of the road. 
At the end of the road, there are some withered trees.", "visual_elements": [ { "content": "The camera continues to pan downwards.", "type": "camera", "weight": 3 }, { "content": "In front of the white building are several blue human-shaped stands.", "type": "attribute", "weight": 2 }, { "content": "The stands have white numbers and names on them.", "type": "attribute", "weight": 2 }, { "content": "Billboards line both sides of the road.", "type": "attribute", "weight": 2 }, { "content": "Some billboards feature players wearing jerseys.", "type": "attribute", "weight": 1 }, { "content": "People are walking in front of the white building and on the left side of the road.", "type": "action", "weight": 2 }, { "content": "At the end of the road, there are some withered trees.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 19 }, { "index": "TUNA_0926", "video_path": "MiraData/video_clips/000005063/000005063317.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 23.04, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a modern, white cylindrical building with a circle of glass windows at the top. The background features a blue sky with some white clouds, and in the distance, one can see some hills and the urban landscape.\nThe camera pans down to the left, shifting the view from the front of the building to its side. Each floor of the building's side has large glass windows. To the left of the building, there is a water area where many yachts and small boats are docked.\nThe camera continues to turn to the left, and the cylindrical building exits the scene. Houses are neatly arranged along both banks of the water area. The red houses on the left bank have dark-colored roofs. Behind the building cluster on the right bank, there is an entire area of mudflats. The hillsides in the background are covered with greenery, and buildings extend halfway up the mountains.", "events": [ { "event": "At the beginning of the video, the camera focuses on a modern, white cylindrical building with a circle of glass windows at the top. The background features a blue sky with some white clouds, and in the distance, one can see some hills and the urban landscape.", "visual_elements": [ { "content": "The camera focuses on a modern white cylindrical building.", "type": "camera", "weight": 3 }, { "content": "The top of the cylindrical building has a circle of glass windows.", "type": "attribute", "weight": 2 }, { "content": "The background is a blue sky with some white clouds.", "type": "scene", "weight": 2 }, { "content": "In the distance, one can see some hills and urban landscapes.", "type": "scene", "weight": 1 } ] }, { "event": "The camera pans down to the left, shifting the view from the front of the building to its side. Each floor of the building's side has large glass windows. 
To the left of the building, there is a water area where many yachts and small boats are docked.", "visual_elements": [ { "content": "The camera pans down to the left.", "type": "camera", "weight": 3 }, { "content": "The view shifts from the front of the building to its side.", "type": "camera", "weight": 3 }, { "content": "Each floor of the building's side has large glass windows.", "type": "attribute", "weight": 2 }, { "content": "To the left of the building, there is a water area.", "type": "attribute", "weight": 2 }, { "content": "Many yachts and small boats are docked in the water area.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to turn to the left, and the cylindrical building exits the scene. Houses are neatly arranged along both banks of the water area. The red houses on the left bank have dark-colored roofs. Behind the building cluster on the right bank, there is an entire area of mudflats. The hillsides in the background are covered with greenery, and buildings extend halfway up the mountains.", "visual_elements": [ { "content": "The camera continues to pan to the left.", "type": "camera", "weight": 3 }, { "content": "The cylindrical building exits the scene.", "type": "attribute", "weight": 1 }, { "content": "Houses are neatly arranged along both banks of the water area.", "type": "attribute", "weight": 2 }, { "content": "The red houses on the left bank have dark-colored roofs.", "type": "attribute", "weight": 2 }, { "content": "Behind the building cluster on the right bank, there is an entire area of mudflats.", "type": "attribute", "weight": 1 }, { "content": "The hillsides in the background are covered with greenery.", "type": "scene", "weight": 2 }, { "content": "Buildings extend halfway up the mountains.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0927", "video_path": "MiraData/video_clips/000005063/000005063691.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 23.04, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a magnificent building, primarily featuring the exterior of the Houses of Parliament. The exterior walls of the building are constructed from light-colored stone, adorned with exquisite carvings, showcasing the Gothic architectural style. The windows are presented in elongated arches with delicate window frames, and bright sunlight shines on the building.\nThe camera pans slowly to the left, and a pointed section of the exterior wall neatly appears in the scene. In the background, the spires of distant buildings can be seen, and an airplane flies across the blue sky in the distance.\nThe camera continues panning to the left, bringing the left side of the building into view. The top of the central building boasts multiple spires, each adorned with intricate carvings and decorations. A flagpole stands at the very center of the top, flying the Union Jack, which flutters in the wind. A larger area of blue sky appears in the background.", "events": [ { "event": "At the beginning of the video, the camera focuses on a magnificent building, primarily featuring the exterior of the Houses of Parliament. The exterior walls of the building are constructed from light-colored stone, adorned with exquisite carvings, showcasing the Gothic architectural style. 
The windows are presented in elongated arches with delicate window frames, and bright sunlight shines on the building.", "visual_elements": [ { "content": "The camera focuses on a magnificent building.", "type": "camera", "weight": 3 }, { "content": "This building is the Houses of Parliament.", "type": "attribute", "weight": 1 }, { "content": "The exterior walls of the building are constructed from light-colored stone.", "type": "attribute", "weight": 2 }, { "content": "The exterior walls of the building are adorned with exquisite carvings.", "type": "attribute", "weight": 1 }, { "content": "The building is in the Gothic architectural style.", "type": "attribute", "weight": 2 }, { "content": "The windows are presented in elongated arches.", "type": "attribute", "weight": 2 }, { "content": "Bright sunlight shines on the building.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans slowly to the left, and a pointed section of the exterior wall neatly appears in the scene. In the background, the spires of distant buildings can be seen, and an airplane flies across the blue sky in the distance.", "visual_elements": [ { "content": "The camera pans slowly to the left.", "type": "camera", "weight": 3 }, { "content": "A pointed section of the exterior wall neatly appears in the scene.", "type": "attribute", "weight": 2 }, { "content": "In the background, the spires of distant buildings can be seen.", "type": "scene", "weight": 1 }, { "content": "An airplane flies across the blue sky in the distance.", "type": "scene", "weight": 1 } ] }, { "event": "The camera continues panning to the left, bringing the left side of the building into view. The top of the central building boasts multiple spires, each adorned with intricate carvings and decorations. A flagpole stands at the very center of the top, flying the Union Jack, which flutters in the wind. A larger area of blue sky appears in the background.", "visual_elements": [ { "content": "The camera continues panning to the left.", "type": "camera", "weight": 3 }, { "content": "The left side of the building enters the view.", "type": "attribute", "weight": 1 }, { "content": "The top of the central building boasts multiple spires.", "type": "attribute", "weight": 2 }, { "content": "Each spire is adorned with intricate carvings and decorations.", "type": "attribute", "weight": 2 }, { "content": "A flagpole stands at the very center of the top.", "type": "attribute", "weight": 2 }, { "content": "The Union Jack flutters in the wind.", "type": "attribute", "weight": 2 }, { "content": "A larger area of blue sky appears in the background.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0928", "video_path": "MiraData/video_clips/000005063/000005063705.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 28.32, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on the exterior of a large building, with a white mural painted on the left exterior wall of the building. In front of the building, there are two houses decorated as boxes. The lids of the boxes are black, and the boxes themselves are red, adorned with the \"Arsenal\" logo. A black partition net stands in front of the houses, and a long staircase leads to the large building in the back of the scene.\nThe camera pans to the right, revealing the glass curtain wall on the front of the building. 
Through the glass, some tables, chairs, and decorations inside are visible. Subsequently, the camera pans up and to the right, showing a red welcome banner hanging at the entrance of the building's ground floor. The text on the banner is in white. Two men walk past the door, one wearing a black short-sleeved shirt and the other in a white short-sleeved shirt.\nThe camera continues to pan up and to the right, showcasing the right half of the building. On the wall, there is a giant \"Arsenal\" logo, with murals of several players behind it. Their backs face the camera, and their jerseys display the players' names and numbers. The players have their arms draped over each other's shoulders. In the foreground, a red car drives by.", "events": [ { "event": "At the beginning of the video, the camera focuses on the exterior of a large building, with a white mural painted on the left exterior wall of the building. In front of the building, there are two houses decorated as boxes. The lids of the boxes are black, and the boxes themselves are red, adorned with the \"Arsenal\" logo. A black partition net stands in front of the houses, and a long staircase leads to the large building in the back of the scene.", "visual_elements": [ { "content": "The camera focuses on the exterior of a large building.", "type": "camera", "weight": 3 }, { "content": "A white mural is painted on the left exterior wall of the building.", "type": "attribute", "weight": 2 }, { "content": "In front of the building, there are two houses decorated as boxes.", "type": "attribute", "weight": 3 }, { "content": "The lids of the boxes are black.", "type": "attribute", "weight": 1 }, { "content": "The boxes are red.", "type": "attribute", "weight": 2 }, { "content": "The boxes are adorned with the \"Arsenal\" logo.", "type": "attribute", "weight": 2 }, { "content": "In front of the house, there is a black partition net.", "type": "attribute", "weight": 1 }, { "content": "In front of the net, there is a long staircase.", "type": "attribute", "weight": 2 }, { "content": "The staircase leads to a large building in the back of the scene.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans to the right, revealing the glass curtain wall on the front of the building. Through the glass, some tables, chairs, and decorations inside are visible. Subsequently, the camera pans up and to the right, showing a red welcome banner hanging at the entrance of the building's ground floor. The text on the banner is in white. 
Two men walk past the door, one wearing a black short-sleeved shirt and the other in a white short-sleeved shirt.", "visual_elements": [ { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "The glass curtain wall on the front of the building appears in the scene.", "type": "attribute", "weight": 3 }, { "content": "Through the glass, some tables, chairs, and decorations inside are visible.", "type": "attribute", "weight": 1 }, { "content": "The camera pans up and to the right.", "type": "camera", "weight": 3 }, { "content": "A red welcome banner hangs at the entrance of the building's ground floor.", "type": "attribute", "weight": 2 }, { "content": "The text on the banner is in white.", "type": "attribute", "weight": 1 }, { "content": "Two men walk past the door.", "type": "action", "weight": 2 }, { "content": "One man wears a black short-sleeved shirt.", "type": "attribute", "weight": 1 }, { "content": "The other man wears a white short-sleeved shirt.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera continues to pan up and to the right, showcasing the right half of the building. On the wall, there is a giant \"Arsenal\" logo, with murals of several players behind it. Their backs face the camera, and their jerseys display the players' names and numbers. The players have their arms draped over each other's shoulders. In the foreground, a red car drives by.", "visual_elements": [ { "content": "The camera continues to pan up and to the right.", "type": "camera", "weight": 3 }, { "content": "A giant \"Arsenal\" logo is on the right wall of the building.", "type": "attribute", "weight": 3 }, { "content": "Murals of several players are behind the logo.", "type": "attribute", "weight": 3 }, { "content": "The players' backs face the camera.", "type": "attribute", "weight": 2 }, { "content": "Their jerseys display the players' names and numbers.", "type": "attribute", "weight": 1 }, { "content": "The players have their arms draped over each other's shoulders.", "type": "attribute", "weight": 2 }, { "content": "In the foreground, a red car drives by.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 25 }, { "index": "TUNA_0929", "video_path": "MiraData/video_clips/000005063/000005063710.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 20.52, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on two black cannons mounted on white carriages with black wheels. The cannons are situated in a spacious plaza. To the left of the scene, there is a green tree, and in the background, a large building stands out.\nAs the camera moves forward, it also tilts upwards to reveal a storefront with the words \"BOX OFFICE\" written above it. Above the storefront, a black grill is adorned with white emblems and letters. A person is seated on a white chair outside the store.\nThe camera rotates to the right, fully exposing the word \"ARSENAL\" written on the black grill. Above the store entrance, the words \"THE ARMOURY\" are displayed. A man wearing a black short-sleeved shirt exits the store while talking on the phone. People can be seen both outside and inside the store. Posters are displayed in the windows on both sides of the store. Above the rear of the store, there is a huge mural depicting the backs of several footballers standing side by side. 
Above the mural, the words \"EMIRATES STADIUM\" are written, and just below it, there is a red logo and the words \"CHAPMAN'S\". To the right of the mural, there is an entire wall made of glass.", "events": [ { "event": "At the beginning of the video, the camera focuses on two black cannons mounted on white carriages with black wheels. The cannons are situated in a spacious plaza. To the left of the scene, there is a green tree, and in the background, a large building stands out.", "visual_elements": [ { "content": "The camera focuses on two black cannons.", "type": "camera", "weight": 3 }, { "content": "The cannons are mounted on white carriages.", "type": "attribute", "weight": 3 }, { "content": "The wheels of the carriages are black.", "type": "attribute", "weight": 2 }, { "content": "The cannons are located in an open plaza.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the scene, there is a green tree.", "type": "attribute", "weight": 2 }, { "content": "In the background, there stands a large building.", "type": "scene", "weight": 2 } ] }, { "event": "As the camera moves forward, it also tilts upwards to reveal a storefront with the words \"BOX OFFICE\" written above it. Above the storefront, a black grill is adorned with white emblems and letters. A person is seated on a white chair outside the store.", "visual_elements": [ { "content": "As the camera moves forward, it simultaneously tilts upwards.", "type": "camera", "weight": 3 }, { "content": "A storefront with the words \"BOX OFFICE\" written on it comes into view.", "type": "attribute", "weight": 2 }, { "content": "Above, a black grating is adorned with white emblems and letters.", "type": "attribute", "weight": 2 }, { "content": "A person is seated on a white chair outside the store.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera rotates to the right, fully exposing the word \"ARSENAL\" written on the black grill. Above the store entrance, the words \"THE ARMOURY\" are displayed. A man wearing a black short-sleeved shirt exits the store while talking on the phone. People can be seen both outside and inside the store. Posters are displayed in the windows on both sides of the store. Above the rear of the store, there is a huge mural depicting the backs of several footballers standing side by side. Above the mural, the words \"EMIRATES STADIUM\" are written, and just below it, there is a red logo and the words \"CHAPMAN'S\". 
To the right of the mural, there is an entire wall made of glass.", "visual_elements": [ { "content": "The camera rotates to the right.", "type": "camera", "weight": 3 }, { "content": "The letters \"ARSENAL\" on the black grating are fully visible.", "type": "attribute", "weight": 2 }, { "content": "Above the entrance of the store, the words \"THE ARMOURY\" are displayed.", "type": "attribute", "weight": 2 }, { "content": "A man wearing a black short-sleeved shirt exits the store while talking on the phone.", "type": "action", "weight": 2 }, { "content": "People can be seen both at the entrance and inside the store.", "type": "attribute", "weight": 1 }, { "content": "Posters are displayed in the windows on both sides of the store.", "type": "attribute", "weight": 2 }, { "content": "Above the rear of the store, there is a huge mural.", "type": "attribute", "weight": 2 }, { "content": "The mural depicts the backs of several footballers.", "type": "attribute", "weight": 2 }, { "content": "They are standing side by side.", "type": "attribute", "weight": 2 }, { "content": "Above the mural, the words \"EMIRATES STADIUM\" are written.", "type": "attribute", "weight": 2 }, { "content": "Directly below the mural, there is a red emblem with the words \"CHAPMAN'S\" on it.", "type": "attribute", "weight": 2 }, { "content": "To the right of the mural, there is an entire wall of glass.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0930", "video_path": "MiraData/video_clips/000005063/000005063758.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 29.0, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, on the left side of the scene is a high-rise building with numerous glass windows. Vehicles are driving on the curved road to the right of the building. To the right of the road, there is first a curved circular viaduct, and behind the bridge stands a huge glass curtain wall. On the side closer to the camera, there is a large orange sign with the word \"EE\" written on it. Below the sign, there is a green sticker with the words \"GREEN ZONE\" on it. In the background, there are many tall buildings.\nThe camera rotates to the right, and the entire building with the glass curtain wall comes into view. There is a huge arched structure on top of the building. The glass curtain wall on the front of the building displays the words \"WEMBLEY CONNECTED BY EE\". In the open space in front of the building, there are two white pillars. To the right of the building's entrance, there is an uphill slope, and pedestrians are walking down it. In the background, electricity poles and gray buildings are visible.", "events": [ { "event": "At the beginning of the video, on the left side of the scene is a high-rise building with numerous glass windows. Vehicles are driving on the curved road to the right of the building. To the right of the road, there is first a curved circular viaduct, and behind the bridge stands a huge glass curtain wall. On the side closer to the camera, there is a large orange sign with the word \"EE\" written on it. Below the sign, there is a green sticker with the words \"GREEN ZONE\" on it. 
In the background, there are many tall buildings.", "visual_elements": [ { "content": "On the left side of the scene is a high-rise building with numerous glass windows.", "type": "attribute", "weight": 3 }, { "content": "Vehicles are driving on the curved road to the right of the building.", "type": "action", "weight": 1 }, { "content": "To the right of the road, there is first a curved circular viaduct.", "type": "attribute", "weight": 2 }, { "content": "Behind the bridge, there stands a huge glass curtain wall.", "type": "attribute", "weight": 3 }, { "content": "On the side closer to the camera, there is a large orange sign.", "type": "attribute", "weight": 2 }, { "content": "The sign reads \"EE\".", "type": "attribute", "weight": 2 }, { "content": "Below it, there is a green sticker attached.", "type": "attribute", "weight": 2 }, { "content": "The sticker has the words \"GREEN ZONE\" on it.", "type": "attribute", "weight": 1 }, { "content": "In the background, there are many tall buildings.", "type": "scene", "weight": 2 } ] }, { "event": "The camera rotates to the right, and the entire building with the glass curtain wall comes into view. There is a huge arched structure on top of the building. The glass curtain wall on the front of the building displays the words \"WEMBLEY CONNECTED BY EE\". In the open space in front of the building, there are two white pillars. To the right of the building's entrance, there is an uphill slope, and pedestrians are walking down it. In the background, electricity poles and gray buildings are visible.", "visual_elements": [ { "content": "The camera rotates to the right.", "type": "camera", "weight": 3 }, { "content": "The entire building with the glass curtain wall comes into view.", "type": "attribute", "weight": 2 }, { "content": "There is a huge arched structure on top of the building.", "type": "attribute", "weight": 2 }, { "content": "On the glass curtain wall of the building's front, the words \"WEMBLEY CONNECTED BY EE\" can be seen.", "type": "attribute", "weight": 3 }, { "content": "In the open space in front of the building, there are two white pillars.", "type": "attribute", "weight": 2 }, { "content": "To the right of the building's entrance, there is an uphill slope.", "type": "attribute", "weight": 2 }, { "content": "Pedestrians are walking down the slope.", "type": "action", "weight": 1 }, { "content": "In the background, electricity poles and gray buildings are visible.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0931", "video_path": "MiraData/video_clips/000005063/000005063773.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 28.76, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a gray concrete wall. Subsequently, the camera rapidly pans to the right, and a large building enters the scene, with its exterior dominated by large glass windows.\nThe camera continues to pan to the right, showing multiple entrances at the base of the building. Above the entrances, there is a green sign that reads \"GREEN ZONE\". In front of the entrance area, there are pillars and signboards, with sky-blue signboards displaying various information and instructions.\nThe camera slowly pans up and to the right, showing a huge white arched steel structure above the building. This steel structure extends from the front to the back of the building, spanning the entire building diagonally. 
There are some thin support cables on both sides of the steel structure. The background is a blue sky.", "events": [ { "event": "At the beginning of the video, the camera focuses on a gray concrete wall. Subsequently, the camera rapidly pans to the right, and a large building enters the scene, with its exterior dominated by large glass windows.", "visual_elements": [ { "content": "The camera focuses on a gray concrete wall.", "type": "camera", "weight": 3 }, { "content": "Subsequently, the camera rapidly pans to the right.", "type": "camera", "weight": 3 }, { "content": "A large building enters the scene.", "type": "scene", "weight": 3 }, { "content": "The building's exterior is dominated by large glass windows.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to pan to the right, showing multiple entrances at the base of the building. Above the entrances, there is a green sign that reads \"GREEN ZONE\". In front of the entrance area, there are pillars and signboards, with sky-blue signboards displaying various information and instructions.", "visual_elements": [ { "content": "The camera continues to pan to the right.", "type": "camera", "weight": 3 }, { "content": "Multiple entrances are visible at the base of the building.", "type": "attribute", "weight": 2 }, { "content": "Above the entrances, there is a green sign.", "type": "attribute", "weight": 2 }, { "content": "The sign reads \"GREEN ZONE\".", "type": "attribute", "weight": 1 }, { "content": "In front of the entrance area, there are pillars and signboards.", "type": "attribute", "weight": 2 }, { "content": "Sky-blue signboards display various information and instructions.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera slowly pans up and to the right, showing a huge white arched steel structure above the building. This steel structure extends from the front to the back of the building, spanning the entire building diagonally. There are some thin support cables on both sides of the steel structure. The background is a blue sky.", "visual_elements": [ { "content": "The camera slowly pans up and to the right.", "type": "camera", "weight": 3 }, { "content": "Above the building, there is a huge white arched steel structure.", "type": "attribute", "weight": 3 }, { "content": "The steel structure stretches from the front to the back of the building.", "type": "attribute", "weight": 2 }, { "content": "The steel structure spans the entire building diagonally.", "type": "attribute", "weight": 2 }, { "content": "There are some thin support cables on both sides of the steel structure.", "type": "attribute", "weight": 1 }, { "content": "The background is a blue sky.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0932", "video_path": "MiraData/video_clips/000005063/000005063806.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 24.24, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a man and a woman walking on a green path. The young white woman on the left side of the screen has short black curly hair, wears a yellow turtleneck sweater and black pants, carries a black backpack, and holds a notebook in her hand. The young man on the right wears black-framed glasses, a gray plaid jacket and a white inner layer on the upper body, and blue jeans on the lower body. 
He holds a few books in his left hand and a mobile phone with a blue case in his right hand. In the background, the trees on the roadside are lush and green, and the trees on the left side of the screen are surrounded by black railings. There are pedestrians passing by in the blur on the right side of the screen.\nThe camera gradually moves backwards as the two people walk forward. They smile and talk as they walk. They look down at the phone in the man's hand and occasionally look up at each other. At the end of the video, the woman walks out of the screen from the left side and the man walks out of the screen from the right side.", "events": [ { "event": "At the beginning of the video, the camera is focused on a man and a woman walking on a green path. The young white woman on the left side of the screen has short black curly hair, wears a yellow turtleneck sweater and black pants, carries a black backpack, and holds a notebook in her hand. The young man on the right wears black-framed glasses, a gray plaid jacket and a white inner layer on the upper body, and blue jeans on the lower body. He holds a few books in his left hand and a mobile phone with a blue case in his right hand. In the background, the trees on the roadside are lush and green, and the trees on the left side of the screen are surrounded by black railings. There are pedestrians passing by in the blur on the right side of the screen.", "visual_elements": [ { "content": "The camera is focused on a man and a woman walking on a green path.", "type": "camera", "weight": 3 }, { "content": "The young white woman on the left side of the screen has short black curly hair, a yellow turtleneck sweater and black pants.", "type": "attribute", "weight": 3 }, { "content": "The woman carries a black backpack on her right shoulder and holds a notebook in her hand.", "type": "attribute", "weight": 2 }, { "content": "The young man on the right side of the screen wears black-framed glasses, a gray plaid jacket with a white inner layer, and blue jeans.", "type": "attribute", "weight": 3 }, { "content": "The man holds a few books in his left hand and a mobile phone with a blue case in his right hand.", "type": "attribute", "weight": 2 }, { "content": "In the background, the trees on the roadside are lush and green.", "type": "scene", "weight": 3 }, { "content": "The trees on the left side of the screen are surrounded by black railings.", "type": "scene", "weight": 1 }, { "content": "There are pedestrians passing by in the blur on the right side of the screen.", "type": "scene", "weight": 1 } ] }, { "event": "The camera gradually moves backwards as the two people walk forward. They smile and talk as they walk. They look down at the phone in the man's hand and occasionally look up at each other. 
At the end of the video, the woman walks out of the screen from the left side and the man walks out of the screen from the right side.", "visual_elements": [ { "content": "The camera gradually moves backwards as the two people walk forward.", "type": "camera", "weight": 3 }, { "content": "The two people walk and talk while smiling.", "type": "action", "weight": 3 }, { "content": "The two people look down at the mobile phone in the man's hand, and occasionally look up at each other.", "type": "action", "weight": 2 }, { "content": "The woman walks out of the screen from the left side, and the man walks out of the screen from the right side.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 12 }, { "index": "TUNA_0933", "video_path": "MiraData/video_clips/000005063/000005063822.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Urban Activity", "duration": 22.72, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on an off-white wall with some small holes and dents.\nThe camera slowly moves to the right. A young white woman with short curly brown hair, wearing a yellow top and black pants appears in the picture. She stands by the white railing against the wall, looking down at the phone in her hand, her fingers sliding on the phone from time to time, and a smile gradually appears on her face. The background is blurred, the water surface under the railing is sparkling, and the surrounding buildings have white walls and large glass windows.", "events": [ { "event": "The video begins with the camera focused on an off-white wall with some small holes and dents.", "visual_elements": [ { "content": "The camera is pointed at a gray-white wall.", "type": "camera", "weight": 3 }, { "content": "There are some small holes and dents on the wall.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera slowly moves to the right. A young white woman with short curly brown hair, wearing a yellow top and black pants appears in the picture. She stands by the white railing against the wall, looking down at the phone in her hand, her fingers sliding on the phone from time to time, and a smile gradually appears on her face. 
The background is blurred, the water surface under the railing is sparkling, and the surrounding buildings have white walls and large glass windows.", "visual_elements": [ { "content": "The camera moves slowly to the right.", "type": "camera", "weight": 3 }, { "content": "A young white woman enters the picture from the left side of the picture.", "type": "attribute", "weight": 3 }, { "content": "The woman has short curly brown hair and wears a yellow top and black pants.", "type": "attribute", "weight": 2 }, { "content": "The woman stands by the white railing against the wall, looking down at the phone in her hand, her fingers sliding on the phone from time to time, and a smile gradually appears on her face.", "type": "action", "weight": 3 }, { "content": "In the background, the water surface under the railing is sparkling.", "type": "scene", "weight": 3 }, { "content": "The surrounding buildings have white walls and large glass windows.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0934", "video_path": "MiraData/video_clips/000005064/000005064295.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 28.8, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with an overhead shot of a marina full of yachts. The marina is lined with rows of yachts of various types and sizes, including sailboats and powerboats. Some buildings and a parking lot can be seen in the upper left corner of the marina, and in the upper right corner is a bridge with a waterway underneath it.\nThe camera continues to pan right until the end of the marina, showing more yachts and the structure of the marina. At the end of the marina is a long trestle bridge connecting more yachts. There are a few people walking on the trestle and the bridge and waterway are still visible in the background.\nThe camera moves down to the lower left until it is close enough to the water. The scene eventually settles on this image.", "events": [ { "event": "The video begins with an overhead shot of a marina full of yachts. The marina is lined with rows of yachts of various types and sizes, including sailboats and powerboats. Some buildings and a parking lot can be seen in the upper left corner of the marina, and in the upper right corner is a bridge with a waterway underneath it.", "visual_elements": [ { "content": "The camera shoots from a high vantage point of a marina full of yachts.", "type": "camera", "weight": 3 }, { "content": "The marina is lined with rows of yachts.", "type": "attribute", "weight": 3 }, { "content": "Yachts come in all types and sizes, from sailboats to powerboats.", "type": "attribute", "weight": 2 }, { "content": "There are some buildings and a parking lot in the upper left corner of the marina, and a bridge in the upper right corner.", "type": "attribute", "weight": 1 }, { "content": "There is a waterway underneath the bridge.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera continues to pan right until the end of the marina, showing more yachts and the structure of the marina. At the end of the marina is a long trestle bridge connecting more yachts. 
There are a few people walking on the trestle and the bridge and waterway are still visible in the background.", "visual_elements": [ { "content": "The camera continues to pan right until the end of the marina.", "type": "camera", "weight": 3 }, { "content": "A long trestle bridge connects more yachts, with a few people walking on the bridge.", "type": "scene", "weight": 3 }, { "content": "Bridges and waterways are clearly visible.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera moves down to the lower left until it is close enough to the water. The scene eventually settles on this image.", "visual_elements": [ { "content": "The camera moves down to the lower left until it is close enough to the water.", "type": "camera", "weight": 3 }, { "content": "The scene eventually settles on this image.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 10 }, { "index": "TUNA_0935", "video_path": "MiraData/video_clips/000005064/000005064314.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Cityscape", "duration": 23.2, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera overlooks a water area and the surrounding buildings from above. White guardrails are installed on both sides of the sidewalk along the shore. Behind the road, there stands a white modern building, with a red-brick building next to it. The water on the lake surface is dark and turbid.\nThe camera pans backward, and a suspension bridge spanning the lake fills the entire scene. There is a white pillar in the middle on the left side of the bridge, with steel cables on both sides connecting the pillar to the bridge. Half of the road on the bridge is designed as a bicycle lane, marked with bicycle patterns painted in white paint. Pedestrians walk on the bridge, including a person dressed in a dark-colored top and white pants pushing a baby stroller.", "events": [ { "event": "At the beginning of the video, the camera overlooks a water area and the surrounding buildings from above. White guardrails are installed on both sides of the sidewalk along the shore. Behind the road, there stands a white modern building, with a red-brick building next to it. The water on the lake surface is dark and turbid.", "visual_elements": [ { "content": "The camera overlooks a water area and the surrounding buildings from above.", "type": "camera", "weight": 3 }, { "content": "White guardrails are installed on both sides of the sidewalk along the shore.", "type": "attribute", "weight": 1 }, { "content": "Behind the road, there stands a white modern building.", "type": "attribute", "weight": 2 }, { "content": "Next to the white building, there is a red-brick building.", "type": "attribute", "weight": 2 }, { "content": "The water on the lake surface is dark and turbid.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans backward, and a suspension bridge spanning the lake fills the entire scene. There is a white pillar in the middle on the left side of the bridge, with steel cables on both sides connecting the pillar to the bridge. Half of the road on the bridge is designed as a bicycle lane, marked with bicycle patterns painted in white paint. 
Pedestrians walk on the bridge, including a person dressed in a dark-colored top and white pants pushing a baby stroller.", "visual_elements": [ { "content": "The camera pans backward.", "type": "camera", "weight": 3 }, { "content": "A suspension bridge spanning the lake fills the entire scene.", "type": "attribute", "weight": 3 }, { "content": "In the middle on the left outer side of the bridge, there stands a white pillar.", "type": "attribute", "weight": 2 }, { "content": "Steel cables on both sides of the pillar connect it to the bridge.", "type": "attribute", "weight": 2 }, { "content": "Half of the road on the bridge is designed as a bicycle lane.", "type": "attribute", "weight": 2 }, { "content": "Bicycle patterns are painted in white paint on the bicycle lane.", "type": "attribute", "weight": 2 }, { "content": "Pedestrians walk on the bridge.", "type": "action", "weight": 2 }, { "content": "A person dressed in a dark-colored top and white pants pushes a baby stroller.", "type": "action", "weight": 1 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0936", "video_path": "MiraData/video_clips/000005066/000005066529.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Natural Landscape", "duration": 21.88, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a blurred image of a beach with blue water, waves lapping at the sand, trees and sand in the distance, and a metal railing to the right, well lit.\nThe camera doesn't change and a little boy wearing a blue long-sleeved top appears on the right side of the frame, the little boy has brown hair. The little boy places his hand on the railing and his fingers touch the surface of the railing. The camera closes in on the upper half of the little boy's body. He rests his arms on the railing, his head rests on his arms, and he gazes at the sea in the distance. The camera closes in on the side of his face and ears, with the beach and sea still visible behind him.\nThe camera pulls away and focuses on his face. Finally the camera pulls away and moves to the left as the little boy fades into the right side of the frame, with the frame focusing on the beach and the sea.", "events": [ { "event": "The video begins with a blurred image of a beach with blue water, waves lapping at the sand, trees and sand in the distance, and a metal railing to the right, well lit.", "visual_elements": [ { "content": "The frame is blurred.", "type": "scene", "weight": 3 }, { "content": "The frame is a beach.", "type": "scene", "weight": 3 }, { "content": "The water is blue.", "type": "attribute", "weight": 2 }, { "content": "The waves are lapping at the sand.", "type": "attribute", "weight": 2 }, { "content": "There are some trees and sand in the distance.", "type": "scene", "weight": 2 }, { "content": "A metal railing on the right.", "type": "scene", "weight": 3 } ] }, { "event": "The camera doesn't change and a little boy wearing a blue long-sleeved top appears on the right side of the frame, the little boy has brown hair. The little boy places his hand on the railing and his fingers touch the surface of the railing. The camera closes in on the upper half of the little boy's body. He rests his arms on the railing, his head rests on his arms, and he gazes at the sea in the distance. 
The camera closes in on the side of his face and ears, with the beach and sea still visible behind him.", "visual_elements": [ { "content": "The camera doesn't change.", "type": "camera", "weight": 3 }, { "content": "A little boy wearing a blue long-sleeved top appears on the right side of the frame, the little boy has brown hair.", "type": "scene", "weight": 3 }, { "content": "The little boy places his hand on the railing and his fingers touch the surface of the railing.", "type": "action", "weight": 3 }, { "content": "The camera closes in on the upper half of the little boy's body.", "type": "camera", "weight": 3 } ] }, { "event": "The camera pulls away and focuses on his face. Finally the camera pulls away and moves to the left as the little boy fades into the right side of the frame, with the frame focusing on the beach and the sea.", "visual_elements": [ { "content": "The camera pulls away and focuses on his face.", "type": "camera", "weight": 3 }, { "content": "The camera pulls away and moves to the left.", "type": "camera", "weight": 3 }, { "content": "The little boy fades into the right side of the frame.", "type": "scene", "weight": 2 }, { "content": "The frame is focusing on the beach and the sea.", "type": "scene", "weight": 3 } ] } ], "n_events": 3, "n_elements": 14 }, { "index": "TUNA_0937", "video_path": "MiraData/video_clips/000005067/000005067912.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 20.03, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focusing on a child's hand reaching out to a dog. In the child's left hand is a silver bowl filled with dog food. The dog has white fur, large ears, a focused facial expression, and slightly closed eyes. The child has a yellow bracelet on his right wrist, and his fingers gently move the food to the dog's mouth. The dog on the right side of the screen sits on a light-colored mat, and while eating, his eyes are fixed on the food in the bowl. The camera is steady, with a yellow wooden floor in the background.\nAs the video progresses, the little girl takes some food out of the bowl and passes it to the dog's mouth. The dog lowers its head and concentrates on eating the food in the little girl's hand and bowl. The background is well-lit.\nFinally, the dog leaves the bowl and lowers its head to sniff the food on the sofa, its ears moving as the dog moves. The little girl takes her hand out of the bowl. The camera finally freezes as the dog raises its head and looks at the bowl opposite again.", "events": [ { "event": "The video begins with the camera focusing on a child's hand reaching out to a dog. In the child's left hand is a silver bowl filled with dog food. The dog has white fur, large ears, a focused facial expression, and slightly closed eyes. The child has a yellow bracelet on his right wrist, and his fingers gently move the food to the dog's mouth. The dog on the right side of the screen sits on a light-colored mat, and while eating, his eyes are fixed on the food in the bowl. 
The camera is steady, with a yellow wooden floor in the background.", "visual_elements": [ { "content": "A child's hand reaches out to a dog.", "type": "action", "weight": 3 }, { "content": "The child's left hand holds a silver bowl with dog food in it.", "type": "attribute", "weight": 3 }, { "content": "The dog has white fur, large ears, a focused facial expression, and slightly closed eyes.", "type": "attribute", "weight": 3 }, { "content": "The child wears a yellow bracelet on his right wrist.", "type": "attribute", "weight": 2 }, { "content": "The hand gently delivers the food to the dog's mouth.", "type": "action", "weight": 2 }, { "content": "The dog on the right side of the screen sits on a light-colored mat, eating while looking at the food in the bowl.", "type": "action", "weight": 3 }, { "content": "The camera is stable.", "type": "camera", "weight": 3 }, { "content": "The background is a yellow wooden floor.", "type": "scene", "weight": 3 } ] }, { "event": "As the video progresses, the little girl takes some food out of the bowl and passes it to the dog's mouth. The dog lowers its head and concentrates on eating the food in the little girl's hand and bowl. The background is well-lit.", "visual_elements": [ { "content": "As the video progresses, the little girl takes some food out of the bowl and hands it to the dog's mouth.", "type": "action", "weight": 2 }, { "content": "The dog lowers his head and concentrates on eating the food in the little girl's hand and bowl.", "type": "action", "weight": 3 }, { "content": "The background is well lit.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the dog leaves the bowl and lowers its head to sniff the food on the sofa, its ears moving as the dog moves. The little girl takes her hand out of the bowl. The camera finally freezes as the dog raises its head and looks at the bowl opposite again.", "visual_elements": [ { "content": "Finally, the dog leaves the bowl and lowers his head to sniff the food on the sofa.", "type": "action", "weight": 3 }, { "content": "The dog's ears shake as the dog moves.", "type": "action", "weight": 1 }, { "content": "The little girl takes her hand out of the bowl.", "type": "action", "weight": 2 }, { "content": "The camera finally freezes when the dog raises his head and looks at the bowl opposite again.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 15 }, { "index": "TUNA_0938", "video_path": "MiraData/video_clips/000005068/000005068339.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 20.6, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a wooden dining table with various kinds of food placed on it. The camera moves slowly forward, first showing a plate of sliced cheese cubes. The cheese cubes are neatly arranged in a white plate, with three purple grapes and three pomegranate arils placed beside it.\nNext to the white plate is a plate of sliced bread, and several small red tomatoes are placed beside the bread. Beside the bread plate is a purple glass, and behind the glass is a circular wooden tray.\nThen the camera turns to the right and gradually gets closer to the tray. The sliced cheese cubes, purple grapes, half a pomegranate, several slices of ham, and a string of small red tomatoes on the tray can be clearly seen. 
There are also several green plants and two white flowers placed beside the tray for decoration.\nFinally, the camera moves backward and returns to the bread plate.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wooden dining table with various kinds of food placed on it. The camera moves slowly forward, first showing a plate of sliced cheese cubes. The cheese cubes are neatly arranged in a white plate, with three purple grapes and three pomegranate arils placed beside it.", "visual_elements": [ { "content": "The camera is focused on a wooden dining table.", "type": "camera", "weight": 3 }, { "content": "On the table are placed various kinds of food.", "type": "scene", "weight": 3 }, { "content": "The camera moves slowly forward.", "type": "camera", "weight": 3 }, { "content": "Firstly, it shows a plate of sliced cheese cubes.", "type": "attribute", "weight": 3 }, { "content": "The cheese cubes are neatly arranged in a white plate.", "type": "attribute", "weight": 2 }, { "content": "There are three purple grapes and three pomegranate arils placed beside the cheese.", "type": "attribute", "weight": 2 } ] }, { "event": "Next to the white plate is a plate of sliced bread, and several small red tomatoes are placed beside the bread. Beside the bread plate is a purple glass, and behind the glass is a circular wooden tray.", "visual_elements": [ { "content": "Next to the white plate is a plate of sliced bread.", "type": "attribute", "weight": 3 }, { "content": "Several small red tomatoes are placed beside the bread.", "type": "attribute", "weight": 2 }, { "content": "Beside the bread plate is a purple glass", "type": "attribute", "weight": 3 }, { "content": "Behind the glass is a circular wooden tray.", "type": "attribute", "weight": 2 } ] }, { "event": "Then the camera turns to the right and gradually gets closer to the tray. The sliced cheese cubes, purple grapes, half a pomegranate, several slices of ham, and a string of small red tomatoes on the tray can be clearly seen. 
There are also several green plants and two white flowers placed beside the tray for decoration.", "visual_elements": [ { "content": "The camera turns to the right.", "type": "camera", "weight": 3 }, { "content": "The camera gradually gets closer to the tray.", "type": "camera", "weight": 3 }, { "content": "The sliced cheese cubes, purple grapes, half a pomegranate, several slices of ham, and a string of small red tomatoes on the tray can be clearly seen.", "type": "attribute", "weight": 2 }, { "content": "There are also several green plants and two white flowers placed beside the tray for decoration.", "type": "scene", "weight": 1 } ] }, { "event": "Finally, the camera moves backward and returns to the bread plate.", "visual_elements": [ { "content": "The camera moves backward.", "type": "camera", "weight": 3 }, { "content": "The camera returns to the bread plate.", "type": "camera", "weight": 3 } ] } ], "n_events": 4, "n_elements": 16 }, { "index": "TUNA_0939", "video_path": "MiraData/video_clips/000005068/000005068471.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Plants", "duration": 27.72, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a blurred image and a dark background to the image.\nThe camera moves to the right and many piles of small persimmons enter the frame, the textured stems and leaves of the persimmons are clearly visible, the persimmons vary in size, and this pile of small persimmons is stacked in decreasing layers in the lower left corner of the frame.", "events": [ { "event": "The video begins with a blurred image and a dark background to the image.", "visual_elements": [ { "content": "The camera is blurred", "type": "attribute", "weight": 2 }, { "content": "The background of the image is a dark color.", "type": "scene", "weight": 2 } ] }, { "event": "The camera moves to the right and many piles of small persimmons enter the frame, the textured stems and leaves of the persimmons are clearly visible, the persimmons vary in size, and this pile of small persimmons is stacked in decreasing layers in the lower left corner of the frame.", "visual_elements": [ { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "Many piles of small persimmons enter the frame.", "type": "attribute", "weight": 3 }, { "content": "The textured stems and leaves of the persimmons are clearly visible,", "type": "attribute", "weight": 2 }, { "content": "The persimmons vary in size.", "type": "attribute", "weight": 2 }, { "content": "Persimmons are red.", "type": "attribute", "weight": 2 }, { "content": "The stems and leaves are green.", "type": "attribute", "weight": 1 }, { "content": "This pile of small persimmons is in the lower left corner of the picture, stacked in decreasing layers.", "type": "attribute", "weight": 3 } ] } ], "n_events": 2, "n_elements": 9 }, { "index": "TUNA_0940", "video_path": "MiraData/video_clips/000005068/000005068755.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 22.8, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a white flat surface with the background being blurry. In the first few seconds, there are a few small water droplets on the white flat surface on the left side of the picture.\nSubsequently, the camera moves slowly to the right and upward, and a transparent packaging bag appears in the frame. 
There is a piece of red meat inside the bag. The camera continues to move to the right until the piece of meat is completely in the center of the picture.\nAt this moment, the camera moves forward slightly to focus on the piece of meat, and the texture on the surface of the meat is clearly visible.\nNext, the camera continues to move forward, and the details of the piece of meat in the picture become even clearer. The text and logo on the packaging bag also gradually come into view until the text \"Tanmaya\" and a circular red logo on the packaging bag are completely clear, and the video ends.", "events": [ { "event": "At the beginning of the video, the camera is focused on a white flat surface with the background being blurry. In the first few seconds, there are a few small water droplets on the white flat surface on the left side of the picture.", "visual_elements": [ { "content": "The camera is focused on a white flat surface.", "type": "camera", "weight": 3 }, { "content": "The background is blurry.", "type": "scene", "weight": 1 }, { "content": "There are a few small water droplets on the white flat surface on the left side of the picture.", "type": "scene", "weight": 1 } ] }, { "event": "Subsequently, the camera moves slowly to the right and upward, and a transparent packaging bag appears in the frame. There is a piece of red meat inside the bag. The camera continues to move to the right until the piece of meat is completely in the center of the picture.", "visual_elements": [ { "content": "The camera moves slowly to the right and upward.", "type": "camera", "weight": 3 }, { "content": "A transparent packaging bag appears in the frame.", "type": "attribute", "weight": 2 }, { "content": "There is a piece of red meat inside the bag.", "type": "attribute", "weight": 3 }, { "content": "The camera continues to move to the right until the piece of meat is completely in the center of the picture.", "type": "camera", "weight": 3 } ] }, { "event": "At this moment, the camera moves forward slightly to focus on the piece of meat, and the texture on the surface of the meat is clearly visible.", "visual_elements": [ { "content": "The camera moves forward slightly.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the piece of meat.", "type": "camera", "weight": 3 }, { "content": "The texture on the surface of the meat is clearly visible.", "type": "attribute", "weight": 2 } ] }, { "event": "Next, the camera continues to move forward, and the details of the piece of meat in the picture become even clearer. 
The text and logo on the packaging bag also gradually come into view until the text \"Tanmaya\" and a circular red logo on the packaging bag are completely clear, and the video ends.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The details of the piece of meat in the picture become even clearer.", "type": "attribute", "weight": 2 }, { "content": "The text and logo on the packaging bag also gradually come into view.", "type": "attribute", "weight": 2 }, { "content": "The text \"Tanmaya\" and a circular red logo on the packaging bag become completely clear.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 14 }, { "index": "TUNA_0941", "video_path": "MiraData/video_clips/000005068/000005068763.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Animals & Pets", "duration": 20.92, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with the camera focused on a black and white dog with its tongue hanging out. The background is pure white with no other objects or people. The dog has bright eyes, drooping ears, and a brown collar around its neck. The camera remains steady, with the dog's head tilted slightly upward and its eyes looking forward.\nNext, the dog's head turns slightly to the right, with its tongue still hanging out. The dog's ears sway gently as the head turns.\nNext, the dog's head is raised and its eyes look upward. The dog's tongue is fully retracted, and its mouth is slightly open, revealing its teeth. Then, a hand extends from the left side of the frame, holding a yellow rectangular snack. The dog looks up, follows the snack with his eyes, sniffs the snack a few times, opens his mouth, turns to the left, and lowers his head to bite the snack. At the same time, the hand slowly releases the snack and disappears from the frame. Finally, the dog's entire body gradually disappears from the frame. The camera finally freezes on the pure white background.", "events": [ { "event": "The video begins with the camera focused on a black and white dog with its tongue hanging out. The background is pure white with no other objects or people. The dog has bright eyes, drooping ears, and a brown collar around its neck. The camera remains steady, with the dog's head tilted slightly upward and its eyes looking forward.", "visual_elements": [ { "content": "The camera is pointed at a black and white dog.", "type": "camera", "weight": 3 }, { "content": "The dog's tongue is sticking out.", "type": "action", "weight": 3 }, { "content": "The dog's eyes are bright, its ears are drooping, and it has a brown collar around its neck.", "type": "attribute", "weight": 2 }, { "content": "The camera is held steady.", "type": "camera", "weight": 3 }, { "content": "The dog's head is tilted slightly upwards, and its eyes are looking forward.", "type": "action", "weight": 3 } ] }, { "event": "Next, the dog's head turns slightly to the right, with its tongue still hanging out. The dog's ears sway gently as the head turns.", "visual_elements": [ { "content": "Next, the dog's head turns slightly to the right, and its tongue is still sticking out.", "type": "action", "weight": 2 }, { "content": "The dog's ears swing gently as its head turns.", "type": "action", "weight": 1 } ] }, { "event": "Next, the dog's head is raised and its eyes look upward. The dog's tongue is fully retracted, and its mouth is slightly open, revealing its teeth. 
Then, a hand extends from the left side of the frame, holding a yellow rectangular snack. The dog looks up, follows the snack with his eyes, sniffs the snack a few times, opens his mouth, turns to the left, and lowers his head to bite the snack. At the same time, the hand slowly releases the snack and disappears from the frame. Finally, the dog's entire body gradually disappears from the frame. The camera finally freezes on the pure white background.", "visual_elements": [ { "content": "The dog's head is raised, and its eyes are looking upwards.", "type": "action", "weight": 2 }, { "content": "The dog's tongue is completely retracted, and its mouth is slightly open, revealing its teeth.", "type": "action", "weight": 3 }, { "content": "Then, a hand extends from the left side of the frame.", "type": "action", "weight": 3 }, { "content": "The hand holds a yellow rectangular snack.", "type": "action", "weight": 3 }, { "content": "The dog looks up, and its eyes follow the snack.", "type": "action", "weight": 2 }, { "content": "The dog sniffs the snack a few times and opens its mouth to turn to the left, lowering its head to bite the snack.", "type": "action", "weight": 3 }, { "content": "At the same time, the hand slowly releases the snack and disappears from the frame.", "type": "action", "weight": 3 }, { "content": "Finally, the dog's entire body gradually disappears from the frame.", "type": "camera", "weight": 3 }, { "content": "The camera finally freezes on the pure white background.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0942", "video_path": "MiraData/video_clips/000005068/000005068765.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 24.28, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a wooden table with various vegetables and ingredients placed on it. Firstly, the camera focuses on a pile of carrots and some mushrooms. The carrots are neatly piled up together, and the mushrooms beside them are placed on a wooden chopping board. Some red onions and potatoes can be seen in the background. Then, the camera moves forward to show a small dish of yellow spices and some almonds. Behind the spices, there is a wooden cutting board with some already sliced carrot pieces and a knife on it.\nThen, the camera turns to the right, and zucchini, broccoli, yellow bell peppers, cauliflower, red bell peppers, corn and cherry tomatoes appear in the picture one by one. Behind the cherry tomatoes, there are also radishes, garlic and a glass jar filled with grains.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wooden table with various vegetables and ingredients placed on it. Firstly, the camera focuses on a pile of carrots and some mushrooms. The carrots are neatly piled up together, and the mushrooms beside them are placed on a wooden chopping board. Some red onions and potatoes can be seen in the background. Then, the camera moves forward to show a small dish of yellow spices and some almonds. 
Behind the spices, there is a wooden cutting board with some already sliced carrot pieces and a knife on it.", "visual_elements": [ { "content": "The camera is focused on a wooden table.", "type": "camera", "weight": 3 }, { "content": "There are various vegetables and ingredients placed on the table.", "type": "attribute", "weight": 2 }, { "content": "Firstly, the camera focuses on a pile of carrots and some mushrooms.", "type": "camera", "weight": 3 }, { "content": "The carrots are neatly piled up together.", "type": "attribute", "weight": 2 }, { "content": "The mushrooms beside carrots are placed on a wooden chopping board.", "type": "attribute", "weight": 2 }, { "content": "Some red onions and potatoes can be seen in the background.", "type": "scene", "weight": 2 }, { "content": "The camera moves forward.", "type": "camera", "weight": 3 }, { "content": "It shows a small dish of yellow spices and some almonds.", "type": "attribute", "weight": 2 }, { "content": "Behind the spices, there is a wooden cutting board.", "type": "attribute", "weight": 2 }, { "content": "On the cutting board are some already sliced carrot pieces and a knife.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera turns to the right, and zucchini, broccoli, yellow bell peppers, cauliflower, red bell peppers, corn and cherry tomatoes appear in the picture one by one. Behind the cherry tomatoes, there are also radishes, garlic and a glass jar filled with grains.", "visual_elements": [ { "content": "The camera turns to the right.", "type": "camera", "weight": 3 }, { "content": "Zucchini, broccoli, yellow bell peppers, cauliflower, red bell peppers, corn and cherry tomatoes appear in the picture one by one.", "type": "attribute", "weight": 2 }, { "content": "Behind the cherry tomatoes, there are also radishes, garlic and a glass jar filled with grains.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 13 }, { "index": "TUNA_0943", "video_path": "MiraData/video_clips/000005068/000005068824.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 21.36, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a row of eggs placed in a paper egg carton. The background is a pink wall and the tabletop is wooden.\nThe camera moves slowly from left to right, and the color and size of each egg are slightly different.", "events": [ { "event": "At the beginning of the video, the camera is focused on a row of eggs placed in a paper egg carton. 
The background is a pink wall and the tabletop is wooden.", "visual_elements": [ { "content": "The camera is focused on a row of eggs placed in a paper egg carton.", "type": "camera", "weight": 3 }, { "content": "The background is a pink wall.", "type": "scene", "weight": 2 }, { "content": "The tabletop is wooden.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera moves slowly from left to right, and the color and size of each egg are slightly different.", "visual_elements": [ { "content": "The camera moves slowly from left to right.", "type": "camera", "weight": 3 }, { "content": "The color and size of each egg are slightly different.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 5 }, { "index": "TUNA_0944", "video_path": "MiraData/video_clips/000005068/000005068842.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 20.16, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera focuses on a group of fresh fruits and vegetables. Firstly, several red tomatoes, a red bell pepper and two apples appear in the picture. The apple closer to the camera is yellow, and the other one is red. A white bowl filled with some orange substances can be seen in the background. As the camera moves to the right, orange oranges gradually come into the picture, with some orange carrots beside them.\nThe camera continues to move to the right to show two sweet potatoes and some onions. There are several blurry mud spots on the sweet potatoes, and the onions still have long beards. There are some traces of dirt on the surface of the potatoes in front of the onions, and one of the potatoes has been cut in half, revealing the yellow area inside. Some yellow fruits and vegetables can be seen in the background. Finally, the camera shows two big ears of corn, a yellow bell pepper and three small ears of corn one by one. The kernels of the corn are plump, and the surface of the bell pepper is smooth and shiny.", "events": [ { "event": "At the beginning of the video, the camera focuses on a group of fresh fruits and vegetables. Firstly, several red tomatoes, a red bell pepper and two apples appear in the picture. The apple closer to the camera is yellow, and the other one is red. A white bowl filled with some orange substances can be seen in the background. As the camera moves to the right, orange oranges gradually come into the picture, with some orange carrots beside them.", "visual_elements": [ { "content": "The camera focuses on a group of fresh fruits and vegetables.", "type": "camera", "weight": 3 }, { "content": "Several red tomatoes, a red bell pepper and two apples appear in the picture.", "type": "attribute", "weight": 3 }, { "content": "The apple closer to the camera is yellow.", "type": "attribute", "weight": 2 }, { "content": "The other one is red.", "type": "attribute", "weight": 2 }, { "content": "A white bowl can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "In the bowl are some orange substances.", "type": "attribute", "weight": 1 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "Orange oranges gradually come into the picture.", "type": "attribute", "weight": 3 }, { "content": "Some orange carrots are beside the oranges.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move to the right to show two sweet potatoes and some onions. 
There are several blurry mud spots on the sweet potatoes, and the onions still have long roots. There are some traces of dirt on the surface of the potatoes in front of the onions, and one of the potatoes has been cut in half, revealing the yellow area inside. Some yellow fruits and vegetables can be seen in the background. Finally, the camera shows two large ears of corn, a yellow bell pepper and three small ears of corn one by one. The kernels of the corn are plump, and the surface of the bell pepper is smooth and shiny.", "visual_elements": [ { "content": "The camera continues to move to the right.", "type": "camera", "weight": 3 }, { "content": "It shows two sweet potatoes and some onions.", "type": "attribute", "weight": 3 }, { "content": "There are several blurry mud spots on the sweet potatoes.", "type": "attribute", "weight": 1 }, { "content": "The onions still have long roots.", "type": "attribute", "weight": 2 }, { "content": "There are some traces of dirt on the surface of the potatoes in front of the onions.", "type": "attribute", "weight": 1 }, { "content": "One of the potatoes has been cut in half.", "type": "attribute", "weight": 2 }, { "content": "The potato reveals the yellow area inside.", "type": "attribute", "weight": 2 }, { "content": "Some yellow fruits and vegetables can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "The camera shows two large ears of corn, a yellow bell pepper and three small ears of corn one by one.", "type": "camera", "weight": 3 }, { "content": "The kernels of the corn are plump.", "type": "attribute", "weight": 2 }, { "content": "The surface of the bell pepper is smooth and shiny.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 20 }, { "index": "TUNA_0945", "video_path": "MiraData/video_clips/000005068/000005068909.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 22.04, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on an empty wooden tabletop with a dark background. There are no items on the tabletop at first. Then, as the camera continues to move to the right, many items appear. A small black bowl filled with red powder gradually appears in the picture.\nThen, a transparent glass jar filled with red and yellow crushed particles and a wooden chopping board appear in the picture. The same substances as those in the glass jar are scattered on the left side of the chopping board, and some yellow particles are scattered on the right side of the chopping board. As the camera moves to the right, another transparent glass jar filled with red powder appears in the picture. There is a small stainless steel bowl beside the glass jar, and the bowl is filled with orange substances. Then, a black mortar and pestle appear in the picture.\nThe camera continues to move to the right, and a transparent glass jar filled with yellow particles appears in the picture, and at the same time, there is a small black bowl filled with orange powder in front of the glass jar.", "events": [ { "event": "At the beginning of the video, the camera is focused on an empty wooden tabletop with a dark background. There are no items on the tabletop at first. Then, as the camera continues to move to the right, many items appear. 
A small black bowl filled with red powder gradually appears in the picture.", "visual_elements": [ { "content": "The camera is focused on an empty wooden tabletop.", "type": "camera", "weight": 3 }, { "content": "The background is dark.", "type": "scene", "weight": 1 }, { "content": "There are no items on the tabletop at first.", "type": "scene", "weight": 1 }, { "content": "Then, as the camera continues to move to the right, many items appear.", "type": "camera", "weight": 3 }, { "content": "A small black bowl filled with red powder gradually appears in the picture.", "type": "attribute", "weight": 3 } ] }, { "event": "Then, a transparent glass jar filled with red and yellow crushed particles and a wooden chopping board appear in the picture. The same substances as those in the glass jar are scattered on the left side of the chopping board, and some yellow particles are scattered on the right side of the chopping board. As the camera moves to the right, another transparent glass jar filled with red powder appears in the picture. There is a small stainless steel bowl beside the glass jar, and the bowl is filled with orange substances. Then, a black mortar and pestle appear in the picture.", "visual_elements": [ { "content": "A transparent glass jar filled with red and yellow crushed particles and a wooden chopping board appear in the picture.", "type": "attribute", "weight": 3 }, { "content": "The same substances as those in the glass jar are scattered on the left side of the chopping board.", "type": "attribute", "weight": 2 }, { "content": "Some yellow particles are scattered on the right side of the chopping board.", "type": "attribute", "weight": 2 }, { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "Another transparent glass jar filled with red powder appears in the picture.", "type": "attribute", "weight": 2 }, { "content": "There is a small stainless steel bowl beside the glass jar.", "type": "attribute", "weight": 2 }, { "content": "The bowl is filled with orange substances.", "type": "attribute", "weight": 2 }, { "content": "Then, a black mortar and pestle appear in the picture.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move to the right, and a transparent glass jar filled with yellow particles appears in the picture, and at the same time, there is a small black bowl filled with orange powder in front of the glass jar.", "visual_elements": [ { "content": "The camera continues to move to the right.", "type": "camera", "weight": 3 }, { "content": "A transparent glass jar filled with yellow particles appears in the picture.", "type": "attribute", "weight": 2 }, { "content": "At the same time, there is a small black bowl filled with orange powder in front of the glass jar.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0946", "video_path": "MiraData/video_clips/000005068/000005068945.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 22.56, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a group of white eggs, which are neatly arranged in a paper egg carton. The background is pure white. The surface of the eggs is smooth and the color is uniform.\nA hand appears in the picture. The hand enters the picture from the left side and gently picks up the paper egg carton with the eggs. 
The hand and the eggs gradually disappear from the left side of the picture.", "events": [ { "event": "At the beginning of the video, the camera is focused on a group of white eggs, which are neatly arranged in a paper egg carton. The background is pure white. The surface of the eggs is smooth and the color is uniform.", "visual_elements": [ { "content": "The camera is focused on a group of white eggs.", "type": "camera", "weight": 3 }, { "content": "These eggs are neatly arranged in a paper egg carton.", "type": "attribute", "weight": 3 }, { "content": "The background is pure white.", "type": "scene", "weight": 2 }, { "content": "The surface of the eggs is smooth and the color is uniform.", "type": "attribute", "weight": 2 } ] }, { "event": "A hand appears in the picture. The hand enters the picture from the left side and gently picks up the paper egg carton with the eggs. The hand and the eggs gradually disappear from the left side of the picture.", "visual_elements": [ { "content": "A hand appears in the picture.", "type": "scene", "weight": 2 }, { "content": "The hand enters the picture from the left side.", "type": "action", "weight": 3 }, { "content": "This hand picks up the paper egg carton with the eggs.", "type": "action", "weight": 3 }, { "content": "The hand and the eggs gradually disappear from the left side of the picture.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 8 }, { "index": "TUNA_0947", "video_path": "MiraData/video_clips/000005068/000005068953.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 23.6, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a wooden chopping board with several different spices and beans placed on it. In the center of the picture is a bowl of red chili powder, and beside it is a bowl of yellow turmeric powder. Both spices are placed in small black bowls. Beside the small bowls, there is a transparent glass jar filled with crushed red chilies. The lid of the jar is of the metal buckle type. In the background, there is a black mortar and pestle, and beside the mortar is a glass jar filled with red chili powder.\nThen, the camera moves slowly to the left, and a glass jar filled with yellow beans appears in the picture first, followed by another glass jar filled with brown particles. Then, a small black bowl filled with brown spices appears in the picture.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wooden chopping board with several different spices and beans placed on it. In the center of the picture is a bowl of red chili powder, and beside it is a bowl of yellow turmeric powder. Both spices are placed in small black bowls. Beside the small bowls, there is a transparent glass jar filled with crushed red chilies. The lid of the jar is of the metal buckle type. In the background, there is a black mortar and pestle, and beside the mortar is a glass jar filled with red chili powder.", "visual_elements": [ { "content": "The camera is focused on a wooden chopping board.", "type": "camera", "weight": 3 }, { "content": "On the board are several different spices and beans.", "type": "attribute", "weight": 2 }, { "content": "In the center of the picture is a bowl of red chili powder.", "type": "attribute", "weight": 2 }, { "content": "Beside the
red chili powder is a bowl of yellow turmeric powder.", "type": "attribute", "weight": 2 }, { "content": "Both spices are placed in small black bowls.", "type": "attribute", "weight": 2 }, { "content": "Beside the small bowls, there is a transparent glass jar filled with crushed red chilies.", "type": "attribute", "weight": 2 }, { "content": "The lid of the jar is of the metal buckle type.", "type": "attribute", "weight": 2 }, { "content": "In the background, there is a black mortar and pestle.", "type": "scene", "weight": 1 }, { "content": "Beside the mortar is a glass jar filled with red chili powder.", "type": "attribute", "weight": 1 } ] }, { "event": "Then, the camera moves slowly to the left, and a glass jar filled with yellow beans appears in the picture first, followed by another glass jar filled with brown particles. Then, a small black bowl filled with brown spices appears in the picture.", "visual_elements": [ { "content": "The camera moves slowly to the left.", "type": "camera", "weight": 3 }, { "content": "A glass jar filled with yellow beans appears in the picture first.", "type": "scene", "weight": 3 }, { "content": "This jar is followed by another glass jar filled with brown particles.", "type": "attribute", "weight": 1 }, { "content": "A small black bowl appears in the picture.", "type": "scene", "weight": 2 }, { "content": "The small bowl is filled with brown spices.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 14 }, { "index": "TUNA_0948", "video_path": "MiraData/video_clips/000005068/000005068997.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 22.4, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a wooden tabletop with various seasonings and ingredients placed on it. Firstly, the camera focuses on a white ceramic bowl filled with orange lentils. On the left side of the bowl, there is a small black dish filled with red chili powder.\nThen, the camera moves to the right. In the foreground, some star anises and black peppercorns are scattered on the wooden chopping board. Behind the chopping board is a glass jar filled with light brown beans. Then, a transparent glass jar filled with green peas appears on the right side of the picture. Beside the glass jar, there is a small black dish filled with some dried herbs. More glass jars filled with different kinds of beans and grains can be seen in the background. Finally, the camera focuses on some cardamoms on the tabletop.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wooden tabletop with various seasonings and ingredients placed on it. Firstly, the camera focuses on a white ceramic bowl filled with orange lentils. 
On the left side of the bowl, there is a small black dish filled with red chili powder.", "visual_elements": [ { "content": "The camera is focused on a wooden tabletop.", "type": "camera", "weight": 3 }, { "content": "Various seasonings and ingredients are placed on the table.", "type": "scene", "weight": 2 }, { "content": "Firstly, the camera focuses on a white ceramic bowl.", "type": "camera", "weight": 3 }, { "content": "The bowl is filled with orange lentils.", "type": "attribute", "weight": 3 }, { "content": "On the left side of the bowl is a small black dish.", "type": "attribute", "weight": 2 }, { "content": "The dish is filled with red chili powder.", "type": "attribute", "weight": 2 } ] }, { "event": "Then, the camera moves to the right. In the foreground, some star anises and black peppercorns are scattered on the wooden chopping board. Behind the chopping board is a glass jar filled with light brown beans. Then, a transparent glass jar filled with green peas appears on the right side of the picture. Beside the glass jar, there is a small black dish filled with some dried herbs. More glass jars filled with different kinds of beans and grains can be seen in the background. Finally, the camera focuses on some cardamoms on the tabletop.", "visual_elements": [ { "content": "The camera moves to the right.", "type": "camera", "weight": 3 }, { "content": "In the foreground, some star anises and black peppercorns are scattered on the wooden chopping board.", "type": "scene", "weight": 3 }, { "content": "Behind the chopping board is a glass jar filled with light brown beans.", "type": "attribute", "weight": 3 }, { "content": "A transparent glass jar filled with green peas appears on the right side of the picture.", "type": "scene", "weight": 3 }, { "content": "Beside the glass jar, there is a small black dish.", "type": "attribute", "weight": 3 }, { "content": "The dish is filled with some dried herbs.", "type": "attribute", "weight": 3 }, { "content": "More glass jars can be seen in the background.", "type": "scene", "weight": 1 }, { "content": "The jars are filled with different kinds of beans and grains.", "type": "attribute", "weight": 1 }, { "content": "Finally, the camera focuses on some cardamoms on the tabletop.", "type": "camera", "weight": 3 } ] } ], "n_events": 2, "n_elements": 15 }, { "index": "TUNA_0949", "video_path": "MiraData/video_clips/000005069/000005069000.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic", "domain": "Foods", "duration": 25.6, "resolution": { "width": 1280, "height": 720 }, "caption": "At the beginning of the video, the camera is focused on a wooden tabletop with a small black bowl filled with dried herbs placed on it. Some dried beans are scattered in front of the bowl. Beside it is a transparent glass jar filled with green beans. The background is gray.\nThe camera moves slowly to the left. Two transparent glass jars filled with yellow beans appear in the picture, and beside the jars, there is another glass jar filled with brown beans. In the foreground, there is a white bowl filled with yellow powder. Beside the white bowl, there is a wooden chopping board with some scattered black peppercorns and star anises.\nThe camera continues to move to the left, and a small black bowl filled with yellow powder appears in the picture, beside which there is another small black bowl filled with red powder. In the background, two transparent glass jars can be seen, one filled with crushed red chilies, and the other filled with red powder. 
There is also another white bowl filled with orange substances.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wooden tabletop with a small black bowl filled with dried herbs placed on it. Some dried beans are scattered in front of the bowl. Beside it is a transparent glass jar filled with green beans. The background is gray.", "visual_elements": [ { "content": "The camera is focused on a wooden tabletop.", "type": "camera", "weight": 3 }, { "content": "A small black bowl filled with dried herbs is placed on the table.", "type": "attribute", "weight": 3 }, { "content": "Some dried beans are scattered in front of the bowl.", "type": "attribute", "weight": 3 }, { "content": "Beside the bowl is a transparent glass jar filled with green beans.", "type": "attribute", "weight": 2 }, { "content": "The background is gray.", "type": "scene", "weight": 1 } ] }, { "event": "The camera moves slowly to the left. Two transparent glass jars filled with yellow beans appear in the picture, and beside the jars, there is another glass jar filled with brown beans. In the foreground, there is a white bowl filled with yellow powder. Beside the white bowl, there is a wooden chopping board with some scattered black peppercorns and star anises.", "visual_elements": [ { "content": "The camera moves slowly to the left.", "type": "camera", "weight": 3 }, { "content": "Two transparent glass jars filled with yellow beans appear in the picture.", "type": "scene", "weight": 2 }, { "content": "Beside the jars, there is another glass jar filled with brown beans.", "type": "attribute", "weight": 2 }, { "content": "In the foreground, there is a white bowl filled with yellow powder.", "type": "scene", "weight": 1 }, { "content": "Beside the white bowl, there is a wooden chopping board.", "type": "attribute", "weight": 3 }, { "content": "On the board are some scattered black peppercorns and star anises.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera continues to move to the left, and a small black bowl filled with yellow powder appears in the picture, beside which there is another small black bowl filled with red powder. In the background, two transparent glass jars can be seen, one filled with crushed red chilies, and the other filled with red powder. There is also another white bowl filled with orange substances.", "visual_elements": [ { "content": "The camera continues to move to the left.", "type": "camera", "weight": 3 }, { "content": "A small black bowl filled with yellow powder appears in the picture.", "type": "scene", "weight": 3 }, { "content": "Beside the black bowl there is another small black bowl filled with red powder.", "type": "attribute", "weight": 3 }, { "content": "In the background, two transparent glass jars can be seen.", "type": "scene", "weight": 2 }, { "content": "One is filled with crushed red chilies.", "type": "attribute", "weight": 2 }, { "content": "The other is filled with red powder.", "type": "attribute", "weight": 1 }, { "content": "There is also another white bowl filled with orange substances.", "type": "attribute", "weight": 1 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0950", "video_path": "MiraData/video_clips/000005071/000005071979.0.mp4", "video_source": "MiraData", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Animals & Pets", "duration": 26.28, "resolution": { "width": 1280, "height": 720 }, "caption": "The video begins with a long shot. 
The camera is focused on a snow-covered forest path with tall pine trees on both sides. On the path is a young white man wearing a red down vest, a gray long-sleeved top, and black trousers and shoes. The man wears a gray knitted hat and gloves and leads a dog on an orange leash. The dog's face is black and the rest is tan. The man and the dog jog along the path, and the camera captures their movements from the front.\nAs the man and the dog run forward, the camera gradually zooms in and stays in front of them. The man's steps are steady, and the dog follows closely on his left, with its ears erect and its eyes focused. The sun is on the left side of the frame, and the pine forest in the background appears particularly bright under the sunlight, and the snow on the ground reflects a faint light.\nThen, the camera switches to a closer angle to the left, focusing on the legs of the man and the dog, showing their synchronized running movements. The pine forest on the left side of the picture moves past the camera and out of the frame.", "events": [ { "event": "The video begins with a long shot. The camera is focused on a snow-covered forest path with tall pine trees on both sides. On the path is a young white man wearing a red down vest, a gray long-sleeved top, and black trousers and shoes. The man wears a gray knitted hat and gloves and leads a dog on an orange leash. The dog's face is black and the rest is tan. The man and the dog jog along the path, and the camera captures their movements from the front.", "visual_elements": [ { "content": "The video begins with a long shot.", "type": "camera", "weight": 3 }, { "content": "A snow-covered forest path.", "type": "scene", "weight": 3 }, { "content": "Tall pine trees line the path.", "type": "scene", "weight": 3 }, { "content": "A young white man wearing a red down vest, a gray long-sleeved top, black trousers and shoes is on the path.", "type": "attribute", "weight": 3 }, { "content": "The man wears a gray knitted hat and gloves.", "type": "attribute", "weight": 2 }, { "content": "The man is leading a dog on an orange leash.", "type": "action", "weight": 3 }, { "content": "The dog's face is black, and the rest is tan.", "type": "attribute", "weight": 2 }, { "content": "The man and the dog are jogging along the path.", "type": "action", "weight": 3 }, { "content": "The camera captures their movements from the front.", "type": "camera", "weight": 3 } ] }, { "event": "As the man and the dog run forward, the camera gradually zooms in and stays in front of them. The man's steps are steady, and the dog follows closely on his left, with its ears erect and its eyes focused. 
The sun is on the left side of the frame, and the pine forest in the background appears particularly bright under the sunlight, and the snow on the ground reflects a faint light.", "visual_elements": [ { "content": "As the man and the dog run forward, the camera gradually zooms in, staying in front of them.", "type": "camera", "weight": 3 }, { "content": "The man's steps are steady, and the dog follows closely on his left.", "type": "action", "weight": 3 }, { "content": "The dog's ears are erect and its eyes are focused.", "type": "attribute", "weight": 1 }, { "content": "The sun is on the left side of the picture.", "type": "scene", "weight": 2 }, { "content": "The pine forest in the background is particularly bright under the sunlight.", "type": "scene", "weight": 1 }, { "content": "The snow on the ground reflects a faint light.", "type": "scene", "weight": 1 } ] }, { "event": "Then, the camera switches to a closer angle to the left, focusing on the legs of the man and the dog, showing their synchronized running movements. The pine forest on the left side of the picture moves past the camera and out of the frame.", "visual_elements": [ { "content": "Then, the camera switches to a closer angle to the left.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the legs of the man and the dog, showing their synchronized running movements.", "type": "camera", "weight": 3 }, { "content": "The pine forest on the left side of the picture moves past the camera and out of the frame.", "type": "scene", "weight": 3 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0951", "video_path": "CoVLA/2022-07-14--14-32-55--10_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a crossroads with a wet and slippery road. There is a truck on the straight lane after the crossroads. Tall buildings can be seen in the distance, surrounded by some green trees. The traffic lights ahead are showing green, and there is a pedestrian holding an umbrella standing under the post of the traffic lights. Several cars are waiting to cross the intersection on the left side of the scene.\nThe camera pans to the left, revealing a flowerbed in the middle of the road that serves as a divider for the lanes. In the line of vehicles waiting to pass on the right side of the flowerbed, there stands out a bright orange cement mixer truck. In the background, the tall buildings remain clearly visible.\nContinuing to pan to the left, a wide straight road enters the scene, with sparse traffic on it and white road markings painted on the ground. Above the road, a blue traffic sign indicates that the road ahead leads to Hibiya, Ginza, and Echizenjima. In the distance, a pedestrian bridge spans across the road.\nThe camera continues to move forward, with a white car driving ahead in the next lane. The lane ahead is marked with traffic signs indicating that it allows both straight and right turns. At the upcoming fork in the road, the traffic lights show red, while the left turn arrow is green. In the background, tall buildings and greenbelts are still visible.\nThe camera continues to move forward and then stops. At the fork in the road ahead, there is a triangular flowerbed surrounded by white railings. At the intersection of the flowerbed, a yellow device is installed, with a yellow traffic light constantly flashing on it. 
Vehicles are continuously passing through the intersection ahead. In the background, tall buildings and greenbelts remain visible. On the overhead pedestrian bridge at the top of the scene, a pedestrian enters the scene from the right side.", "events": [ { "event": "At the beginning of the video, the camera focuses on a crossroads with a wet and slippery road. There is a truck on the straight lane after the crossroads. Tall buildings can be seen in the distance, surrounded by some green trees. The traffic lights ahead are showing green, and there is a pedestrian holding an umbrella standing under the post of the traffic lights. Several cars are waiting to cross the intersection on the left side of the scene.", "visual_elements": [ { "content": "The camera focuses on a crossroads.", "type": "camera", "weight": 3 }, { "content": "The ground is wet and slippery.", "type": "attribute", "weight": 2 }, { "content": "There is a truck on the straight lane after the crossroads.", "type": "attribute", "weight": 1 }, { "content": "Tall buildings can be seen in the distance.", "type": "scene", "weight": 2 }, { "content": "There are some green trees around.", "type": "scene", "weight": 2 }, { "content": "The traffic lights ahead are green.", "type": "attribute", "weight": 2 }, { "content": "There is a pedestrian holding an umbrella under the post of the traffic lights.", "type": "attribute", "weight": 2 }, { "content": "Several cars on the left side of the scene are waiting to cross the intersection.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans to the left, revealing a flowerbed in the middle of the road that serves as a divider for the lanes. In the line of vehicles waiting to pass on the right side of the flowerbed, there stands out a bright orange cement mixer truck. In the background, the tall buildings remain clearly visible.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "The flowerbed in the middle of the road serves as a divider for the lanes.", "type": "attribute", "weight": 2 }, { "content": "In the line of vehicles waiting to pass on the right side of the flowerbed, there is a bright orange cement mixer truck.", "type": "attribute", "weight": 2 } ] }, { "event": "Continuing to pan to the left, a wide straight road enters the scene, with sparse traffic on it and white road markings painted on the ground. Above the road, a blue traffic sign indicates that the road ahead leads to Hibiya, Ginza, and Echizenjima. In the distance, a pedestrian bridge spans across the road.", "visual_elements": [ { "content": "The camera continues to pan to the left.", "type": "camera", "weight": 3 }, { "content": "A wide straight road comes into the scene.", "type": "attribute", "weight": 3 }, { "content": "White road markings are painted on the ground.", "type": "attribute", "weight": 2 }, { "content": "A blue traffic sign above the road indicates that the road ahead leads to Hibiya, Ginza, and Echizenjima.", "type": "attribute", "weight": 2 }, { "content": "In the distance, a pedestrian bridge stretches across the road overhead.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward, with a white car driving ahead in the next lane. The lane ahead is marked with traffic signs indicating that it allows both straight and right turns. At the upcoming fork in the road, the traffic lights show red, while the left turn arrow is green. 
In the background, tall buildings and greenbelts are still visible.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "A white car is driving ahead in the next lane.", "type": "action", "weight": 2 }, { "content": "The lane ahead has traffic markings indicating that it allows both straight and right turns.", "type": "attribute", "weight": 2 }, { "content": "The traffic lights at the upcoming fork in the road show red.", "type": "attribute", "weight": 2 }, { "content": "The left turn arrow on the traffic lights is green.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward and then stops. At the fork in the road ahead, there is a triangular flowerbed surrounded by white railings. At the intersection of the flowerbed, a yellow device is installed, with a yellow traffic light constantly flashing on it. Vehicles are continuously passing through the intersection ahead. In the background, tall buildings and greenbelts remain visible. On the overhead pedestrian bridge at the top of the scene, a pedestrian enters the scene from the right side.", "visual_elements": [ { "content": "The camera continues to move forward and then stops.", "type": "camera", "weight": 3 }, { "content": "There is a triangular flowerbed at the fork in the road ahead.", "type": "attribute", "weight": 2 }, { "content": "The flowerbed is surrounded by white railings.", "type": "attribute", "weight": 2 }, { "content": "A yellow device is installed at the intersection of the flowerbed.", "type": "attribute", "weight": 2 }, { "content": "The yellow traffic light on the device is constantly flashing.", "type": "attribute", "weight": 2 }, { "content": "Vehicles are continuously passing through the intersection ahead.", "type": "attribute", "weight": 2 }, { "content": "On the overhead pedestrian bridge at the top of the scene, a pedestrian enters the scene from the right side.", "type": "action", "weight": 1 } ] } ], "n_events": 5, "n_elements": 28 }, { "index": "TUNA_0952", "video_path": "CoVLA/2022-07-14--14-32-55--11_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a fork in the road with a wet and slippery surface. The lane ahead is marked with traffic signs indicating that it allows both straight and right turns. On the left side of the scene, there is a triangular flowerbed surrounded by white railings. At the intersection of the flowerbed, a yellow device is installed, with a yellow traffic light constantly flashing on it. The straight traffic light ahead is red, while the left turn arrow is green. Above the scene, there is an elevated pedestrian bridge. In the background, there stands a modern high-rise building, with lush trees in front of it.\nThe traffic light changes from red to green. The camera pans to the right, revealing a road in the scene. On both sides of the road are the descending staircases of an overhead bridge. On the left side of the road, a gray van is parked by the curb. On the right side, white solid lines are painted on the curb, with several cars parked at intervals within the lines. Behind the road, there stands a tall blue building.\nContinuing to pan to the right, the camera captures a straight main road in the scene. 
The flowerbed on the left side of the road is surrounded by white guardrails, and yellow marking lines are painted at intervals on the road shoulder below the guardrails. Along a section of the road next to the guardrails, white marking lines indicating a speed reduction zone are painted. A row of vehicles is parked on the left side of the road ahead.\nThe camera pans forward, revealing a blue traffic sign hanging above the road. On the ground ahead, a yellow speed limit sign is painted, indicating a maximum speed of 40. A yellow truck is driving ahead, and in the distance, the traffic lights show green. On both sides of the road are buildings and greenbelts.", "events": [ { "event": "At the beginning of the video, the camera focuses on a fork in the road with a wet and slippery surface. The lane ahead is marked with traffic signs indicating that it allows both straight and right turns. On the left side of the scene, there is a triangular flowerbed surrounded by white railings. At the intersection of the flowerbed, a yellow device is installed, with a yellow traffic light constantly flashing on it. The straight traffic light ahead is red, while the left turn arrow is green. Above the scene, there is an elevated pedestrian bridge. In the background, there stands a modern high-rise building, with lush trees in front of it.", "visual_elements": [ { "content": "The camera focuses on a fork in the road.", "type": "camera", "weight": 3 }, { "content": "The ground is wet and slippery.", "type": "attribute", "weight": 2 }, { "content": "The lane ahead is marked with traffic signs indicating that it allows both straight and right turns.", "type": "attribute", "weight": 2 }, { "content": "There is a triangular flowerbed on the left side of the scene.", "type": "attribute", "weight": 2 }, { "content": "The flowerbed is surrounded by white railings.", "type": "attribute", "weight": 2 }, { "content": "A yellow device is installed at the intersection of the flowerbed.", "type": "attribute", "weight": 2 }, { "content": "The yellow traffic light on the device is constantly flashing.", "type": "attribute", "weight": 2 }, { "content": "The straight traffic light ahead shows red.", "type": "attribute", "weight": 2 }, { "content": "The left turn arrow on the traffic light ahead is green.", "type": "attribute", "weight": 2 }, { "content": "There is an elevated pedestrian bridge above the scene.", "type": "attribute", "weight": 2 }, { "content": "In the background, there stands a modern high-rise building.", "type": "scene", "weight": 2 }, { "content": "There are lush trees in front of the building.", "type": "scene", "weight": 2 } ] }, { "event": "The traffic light changes from red to green. The camera pans to the right, revealing a road in the scene. On both sides of the road are the descending staircases of an overhead bridge. On the left side of the road, a gray van is parked by the curb. On the right side, white solid lines are painted on the curb, with several cars parked at intervals within the lines. 
Behind the road, there stands a tall blue building.", "visual_elements": [ { "content": "The traffic light changes from red to green.", "type": "action", "weight": 3 }, { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "A road appears in the scene.", "type": "attribute", "weight": 3 }, { "content": "On both sides of the road are the descending staircases of an overhead bridge.", "type": "attribute", "weight": 1 }, { "content": "A gray van is parked by the curb on the left side of the road.", "type": "attribute", "weight": 1 }, { "content": "White solid lines are painted on the curb on the right side of the road.", "type": "attribute", "weight": 2 }, { "content": "Several cars are parked at intervals within the lines.", "type": "attribute", "weight": 2 }, { "content": "Behind the road, there stands a tall blue building.", "type": "scene", "weight": 2 } ] }, { "event": "Continuing to pan to the right, the camera captures a straight main road in the scene. The flowerbed on the left side of the road is surrounded by white guardrails, and yellow marking lines are painted at intervals on the road shoulder below the guardrails. Along a section of the road next to the guardrails, white marking lines indicating a speed reduction zone are painted. A row of vehicles is parked on the left side of the road ahead.", "visual_elements": [ { "content": "The camera continues to pan to the right.", "type": "camera", "weight": 3 }, { "content": "A straight main road appears in the scene.", "type": "attribute", "weight": 3 }, { "content": "The flowerbed on the left side of the road is surrounded by white guardrails.", "type": "attribute", "weight": 2 }, { "content": "Yellow marking lines are painted at intervals on the road shoulder below the guardrails.", "type": "attribute", "weight": 1 }, { "content": "White speed reduction signs are painted along a section of the road next to the guardrails.", "type": "attribute", "weight": 2 }, { "content": "A row of vehicles is parked on the left side of the road ahead.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans forward, revealing a blue traffic sign hanging above the road. On the ground ahead, a yellow speed limit sign is painted, indicating a maximum speed of 40. A yellow truck is driving ahead, and in the distance, the traffic lights show green. On both sides of the road are buildings and greenbelts.", "visual_elements": [ { "content": "The camera pans forward.", "type": "camera", "weight": 3 }, { "content": "A blue traffic sign hangs above the road.", "type": "scene", "weight": 2 }, { "content": "A yellow speed limit sign is painted on the ground ahead.", "type": "scene", "weight": 2 }, { "content": "The speed limit sign indicates a maximum speed of 40.", "type": "attribute", "weight": 2 }, { "content": "A yellow truck is driving ahead.", "type": "attribute", "weight": 2 }, { "content": "The traffic lights in the distance show green.", "type": "scene", "weight": 2 }, { "content": "On both sides of the road are buildings and greenbelts.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 33 }, { "index": "TUNA_0953", "video_path": "CoVLA/2022-07-19--11-55-49--22_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on an urban street. The perspective is shot from inside a car looking forward. 
Ahead, there is a gray SUV with its license plate blurred. A yellow solid line is painted in the center of the road, and next to the yellow line on the right side of the scene, there are white lane division markings. On the left side of the street are low-rise residential buildings, with a yellow wall painted with some flowers. On the right side of the street, there is a sign for a parking lot by the curb, where several cars are parked. Power poles and wires extend along both sides of the street, and the sky is overcast.\nThe camera pans forward as the gray SUV drives ahead. A white car and a truck approach from the right lane. On the right side of the street, there is an empty lot where several cars are parked. On the sidewalk on the left, there is a pedestrian dressed in red, walking with a backpack on.\nThe camera continues to move forward. The traffic light ahead is green, and the gray SUV in front slows down to pass through an intersection. Three cars also drive past on the right lane. Below the traffic light, there is an orange convex mirror for traffic safety. On the sidewalk on the left side of the road, a student with a backpack is walking.\nThe camera continues to move forward, and the vehicle drives into a downhill section. On the left half of the road, text and speed reduction signs are painted in white, reminding drivers to slow down. There are two pedestrians on the sidewalk on the left side of the road. In front of a house on the left side of the road, two cars are parked, with a white sign between them displaying green text and numbers.\nThe camera continues to move forward, and traffic ahead becomes slightly congested. The traffic light at the crossroads is green. A blue billboard appears on the left side of the road, with white text and graphics on it. Cars continue to drive past on the right lane. The video stops as the gray SUV just passes through the green light. Buildings are densely distributed on both sides of the road.", "events": [ { "event": "At the beginning of the video, the camera focuses on an urban street. The perspective is shot from inside a car looking forward. 
Power poles and wires extend along both sides of the street, and the sky is overcast.", "visual_elements": [ { "content": "The camera focuses on an urban street.", "type": "camera", "weight": 3 }, { "content": "The perspective is shot from inside a car looking forward.", "type": "camera", "weight": 3 }, { "content": "There is a gray SUV ahead.", "type": "attribute", "weight": 2 }, { "content": "The SUV's license plate is blurred.", "type": "attribute", "weight": 1 }, { "content": "A yellow solid line is painted in the center of the road.", "type": "attribute", "weight": 2 }, { "content": "Next to the yellow line on the right side of the scene, there are white lane division markings.", "type": "attribute", "weight": 1 }, { "content": "On the left side of the street are low-rise residential buildings.", "type": "attribute", "weight": 2 }, { "content": "A yellow wall is painted with some flowers.", "type": "attribute", "weight": 1 }, { "content": "There is a sign for a parking lot on the right side of the street.", "type": "attribute", "weight": 2 }, { "content": "Several cars are parked in the parking lot.", "type": "scene", "weight": 1 }, { "content": "Power poles and wires extend along both sides of the street.", "type": "attribute", "weight": 2 }, { "content": "The sky is overcast.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans forward as the gray SUV drives ahead. A white car and a truck approach from the right lane. On the right side of the street, there is an empty lot where several cars are parked. On the sidewalk on the left, there is a pedestrian dressed in red, walking with a backpack on.", "visual_elements": [ { "content": "The camera pans forward.", "type": "camera", "weight": 3 }, { "content": "The gray SUV is driving ahead.", "type": "action", "weight": 2 }, { "content": "A white car and a truck drive from the right lane.", "type": "action", "weight": 2 }, { "content": "There is a vacant lot on the right side of the street.", "type": "scene", "weight": 1 }, { "content": "Several cars are parked on the vacant lot.", "type": "scene", "weight": 2 }, { "content": "On the sidewalk on the left, there is a pedestrian dressed in red.", "type": "scene", "weight": 2 }, { "content": "The pedestrian is walking with a backpack.", "type": "action", "weight": 2 } ] }, { "event": "The camera continues to move forward. The traffic light ahead is green, and the gray SUV in front slows down to pass through an intersection. Three cars also drive past on the right lane. Below the traffic light, there is an orange convex mirror for traffic safety. On the sidewalk on the left side of the road, a student with a backpack is walking.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The traffic light ahead is green.", "type": "attribute", "weight": 2 }, { "content": "The gray SUV ahead slows down.", "type": "action", "weight": 1 }, { "content": "The SUV passes through an intersection.", "type": "action", "weight": 2 }, { "content": "Three cars also drive past on the right lane.", "type": "action", "weight": 2 }, { "content": "Below the traffic light, there is an orange convex mirror for traffic safety.", "type": "scene", "weight": 1 }, { "content": "On the sidewalk on the left side of the road, a student with a backpack is walking.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move forward, and the vehicle drives into a downhill section. 
On the left half of the road, text and speed reduction signs are painted in white, reminding drivers to slow down. There are two pedestrians on the sidewalk on the left side of the road. In front of a house on the left side of the road, two cars are parked, with a white sign between them displaying green text and numbers.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The vehicle drives into a downhill section.", "type": "action", "weight": 2 }, { "content": "On the left half of the road, text and speed reduction signs are painted in white.", "type": "attribute", "weight": 2 }, { "content": "There are two pedestrians on the sidewalk on the left side of the road.", "type": "scene", "weight": 2 }, { "content": "In front of a house on the left side of the road, two cars are parked.", "type": "scene", "weight": 2 }, { "content": "There is a white sign between the two cars.", "type": "attribute", "weight": 2 }, { "content": "The sign displays green text and numbers.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward, and traffic ahead becomes slightly congested. The traffic light at the crossroads is green. A blue billboard appears on the left side of the road, with white text and graphics on it. Cars continue to drive past on the right lane. The video stops as the gray SUV just passes through the green light. Buildings are densely distributed on both sides of the road.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "Traffic ahead becomes slightly congested.", "type": "attribute", "weight": 2 }, { "content": "The traffic light at the crossroads is green.", "type": "attribute", "weight": 2 }, { "content": "A blue billboard appears on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "There are white text and graphics on the billboard.", "type": "attribute", "weight": 2 }, { "content": "Cars continue to drive past on the right lane.", "type": "action", "weight": 2 }, { "content": "The video stops as the gray SUV just passes through the green light.", "type": "camera", "weight": 3 }, { "content": "Buildings are densely distributed on both sides of the road.", "type": "scene", "weight": 1 } ] } ], "n_events": 5, "n_elements": 41 }, { "index": "TUNA_0954", "video_path": "CoVLA/2022-07-19--13-42-48--114_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera is fixed inside a moving car, facing forward towards the road. A two-way lane can be seen in the scene, with distinct zebra crossings and lane markings on the pavement. To the left of the road is a multi-story red apartment building, while to the right are some shops and billboards.\nThe camera pans forward. There are several cars driving on the road ahead. Along the left roadside is a row of white guardrails. The red apartment building exits the scene, and a multi-story parking garage appears on the left side of the scene. The shops and billboards on the right remain clearly visible, with one billboard displaying the words \"ENEOS,\" indicating that there is a gas station nearby.\nThe camera continues to move forward, revealing an empty space on the left. 
In this empty space, there is a convenience store named \"Lawson Station,\" with several parked cars in front. The buildings on the right gradually become sparse, and some empty spaces and parking lots can be seen.\nThe camera continues to move forward, and a deceleration sign and the warning text \"Sudden Pursuit Caution\" appear on the left lane. A large blue road sign hangs above the road. The lane transitions from straight to a right curve, with part of the right curve painted red. Yellow road signs are spaced along the left roadside, and red arrows on the signs indicate the direction of drive.\nThe camera continues to move forward, and ahead is a T-junction. A divergence sign is painted in the middle of the intersection. The green light of the traffic signal is flashing constantly, and cars accelerate through the intersection. A speed limit sign of 50 is posted on the road ahead. The buildings on both sides of the road are neatly arranged.", "events": [ { "event": "At the beginning of the video, the camera is fixed inside a moving car, facing forward towards the road. A two-way lane can be seen in the scene, with distinct zebra crossings and lane markings on the pavement. To the left of the road is a multi-story red apartment building, while to the right are some shops and billboards.", "visual_elements": [ { "content": "The camera is fixed inside a moving car.", "type": "camera", "weight": 3 }, { "content": "The camera's perspective faces the road ahead.", "type": "camera", "weight": 3 }, { "content": "A two-way lane can be seen in the scene.", "type": "attribute", "weight": 2 }, { "content": "There are distinct zebra crossings and lane markings on the pavement.", "type": "attribute", "weight": 2 }, { "content": "There is a multi-story red apartment building on the left side of the road.", "type": "scene", "weight": 2 }, { "content": "There are some shops and billboards on the right side of the road.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans forward. There are several cars driving on the road ahead. Along the left roadside is a row of white guardrails. The red apartment building exits the scene, and a multi-story parking garage appears on the left side of the scene. The shops and billboards on the right remain clearly visible, with one billboard displaying the words \"ENEOS,\" indicating that there is a gas station nearby.", "visual_elements": [ { "content": "The camera pans forward.", "type": "camera", "weight": 3 }, { "content": "There are several cars driving on the road ahead.", "type": "attribute", "weight": 2 }, { "content": "Along the left roadside is a row of white guardrails.", "type": "scene", "weight": 2 }, { "content": "A multi-story parking garage appears on the left side of the scene.", "type": "attribute", "weight": 2 }, { "content": "One billboard on the right side of the road displays the words \"ENEOS\".", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward, revealing an empty space on the left. In this empty space, there is a convenience store named \"Lawson Station,\" with several parked cars in front. 
The buildings on the right gradually become sparse, and some empty spaces and parking lots can be seen.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "An empty space appears on the left.", "type": "attribute", "weight": 1 }, { "content": "In this empty space, there is a convenience store named \"Lawson Station\".", "type": "attribute", "weight": 2 }, { "content": "There are several parked cars in front of the store.", "type": "attribute", "weight": 2 }, { "content": "The buildings on the right gradually become sparse.", "type": "scene", "weight": 1 } ] }, { "event": "The camera continues to move forward, and a deceleration sign and the warning text \"Sudden Pursuit Caution\" appear on the left lane. A large blue road sign hangs above the road. The lane transitions from straight to a right curve, with part of the right curve painted red. Yellow road signs are spaced along the left roadside, and red arrows on the signs indicate the direction of drive.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "A deceleration sign and the warning text \"Sudden Pursuit Caution\" appear on the left lane.", "type": "attribute", "weight": 2 }, { "content": "A huge blue road sign hangs above the road.", "type": "attribute", "weight": 2 }, { "content": "The lane transitions from straight to a right curve.", "type": "attribute", "weight": 1 }, { "content": "Part of the right curve is painted red.", "type": "attribute", "weight": 2 }, { "content": "Yellow road signs are spaced along the left roadside.", "type": "attribute", "weight": 2 }, { "content": "Red arrows on the signs indicate the direction of drive.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward, and ahead is a T-junction. A divergence sign is painted in the middle of the intersection. The green light of the traffic signal is flashing constantly, and cars accelerate through the intersection. A speed limit sign of 50 is posted on the road ahead. The buildings on both sides of the road are neatly arranged.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "Ahead is a T-junction.", "type": "attribute", "weight": 2 }, { "content": "A divergence sign is painted in the middle of the intersection.", "type": "attribute", "weight": 1 }, { "content": "The green light of the traffic signal is flashing constantly.", "type": "attribute", "weight": 1 }, { "content": "Cars accelerate through the intersection.", "type": "action", "weight": 2 }, { "content": "A speed limit sign of 50 is posted on the road ahead.", "type": "attribute", "weight": 2 }, { "content": "The buildings on both sides of the road are neatly arranged.", "type": "scene", "weight": 2 } ] } ], "n_events": 5, "n_elements": 30 }, { "index": "TUNA_0955", "video_path": "CoVLA/2022-07-21--10-42-48--73_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene,Multi-Subject", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the scene shows a silver car parked at an intersection, with its license plate blurred. To the left of the intersection, there is a red \"止まれ STOP\" sign. The background features a modern glass building, in front of which several people are walking. 
A middle-aged man wearing a white shirt and khaki shorts walks past the car and continues to move towards the right side of the scene.\nThe silver car turns right with its right turn signal on, while the camera pans forward. In the middle of the scene, a white car is parked by the roadside, and the driver can be seen through the window.\nThen, the camera pans to the left, revealing two more cars lined up behind the white one. When the camera stops panning, a two-way lane appears in the scene. Vehicles in red and white colors continuously drive forward on the right lane. On the sidewalk to the left of the road, a person rides a bicycle. Buildings and utility poles along both sides of the street can be seen in the background.\nThe camera continues to move forward. The silver car reappears in the scene and proceeds to drive forward. The traffic signal light ahead is red, prompting the silver car to slow down and stop. In front of the left side of the car, a person rides an electric tricycle. There is a white billboard with blue text on the left side of the road. On the left side of the intersection, a silver SUV is parked.", "events": [ { "event": "At the beginning of the video, the scene shows a silver car parked at an intersection, with its license plate blurred. To the left of the intersection, there is a red \"止まれ STOP\" sign. The background features a modern glass building, in front of which several people are walking. A middle-aged man wearing a white shirt and khaki shorts walks past the car and continues to move towards the right side of the scene.", "visual_elements": [ { "content": "The scene shows a silver car parked at the intersection.", "type": "attribute", "weight": 3 }, { "content": "The car's license plate is blurred.", "type": "attribute", "weight": 1 }, { "content": "There is a red \"止まれ STOP\" sign on the left side of the intersection.", "type": "attribute", "weight": 2 }, { "content": "The background is a modern glass building.", "type": "scene", "weight": 2 }, { "content": "Several people are walking in front of the building.", "type": "scene", "weight": 2 }, { "content": "A middle-aged man wearing a white shirt and khaki shorts passes in front of the silver car and continues walking to the right side of the scene.", "type": "action", "weight": 2 } ] }, { "event": "The silver car turns right with its right turn signal on, while the camera pans forward. In the middle of the scene, a white car is parked by the roadside, and the driver can be seen through the window.", "visual_elements": [ { "content": "The silver car turns right with its right turn signal on.", "type": "action", "weight": 3 }, { "content": "The camera pans forward.", "type": "camera", "weight": 3 }, { "content": "In the middle of the scene, a white car is parked by the roadside.", "type": "attribute", "weight": 3 }, { "content": "The driver can be seen through the window.", "type": "attribute", "weight": 1 } ] }, { "event": "Then, the camera pans to the left, revealing two more cars lined up behind the white one. When the camera stops panning, a two-way lane appears in the scene. Vehicles in red and white colors continuously drive forward on the right lane. On the sidewalk to the left of the road, a person rides a bicycle. 
Buildings and utility poles along both sides of the street can be seen in the background.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "There are two more cars lined up behind the white car.", "type": "attribute", "weight": 2 }, { "content": "The camera stops panning.", "type": "camera", "weight": 3 }, { "content": "A two-way lane appears in the scene.", "type": "attribute", "weight": 2 }, { "content": "Vehicles in red and white colors continuously drive forward on the right lane.", "type": "action", "weight": 3 }, { "content": "On the sidewalk to the left of the road, a person rides a bicycle.", "type": "attribute", "weight": 2 }, { "content": "Buildings and utility poles along both sides of the street can be seen in the background.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move forward. The silver car reappears in the scene and proceeds to drive forward. The traffic signal light ahead is red, prompting the silver car to slow down and stop. In front of the left side of the car, a person rides an electric tricycle. There is a white billboard with blue text on the left side of the road. On the left side of the intersection, a silver SUV is parked.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The silver car appears in the scene again.", "type": "attribute", "weight": 2 }, { "content": "The silver car moves forward.", "type": "action", "weight": 3 }, { "content": "The traffic signal light at the upcoming intersection is red.", "type": "scene", "weight": 2 }, { "content": "The silver car slows down and stops.", "type": "action", "weight": 3 }, { "content": "In front of the left side of the car, a person rides an electric tricycle.", "type": "attribute", "weight": 2 }, { "content": "There is a white billboard on the left side of the road.", "type": "scene", "weight": 2 }, { "content": "The billboard has blue text on it.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the intersection, there is a silver SUV on the road.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 26 }, { "index": "TUNA_0956", "video_path": "CoVLA/2022-07-21--15-11-45--19_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on an intersection. There is a pedestrian and a cyclist waiting to cross at the zebra crossing on the left side of the traffic lights at the intersection. A few cars are queuing on the adjacent lane waiting to pass. A building on the left side of the scene has the sign \"昭和自動車\", and there are some parked vehicles in front of the building.\nThe traffic lights turn green, and the camera starts moving forward. After passing the intersection, a row of vehicles are parked on the right lane. A building on the left side of the road has the sign \"住友フォークリフト\", and a white minivan is parked in front of the building. A person rides a bicycle along the road in front of the building.\nThe camera continues to move forward, and a silver car is seen driving ahead. A lane divider area appears in the middle of the road. Cars have been driving continuously on the right lane. The buildings on both sides of the road gradually become sparse, and there are some trees and utility poles on the right. 
The silver car slows down and arrives at another intersection. There is a huge red billboard on the building on the left side of the intersection. After two cars have successively completed right turns on the opposite side of the road, the silver car continues to drive straight.\nThe camera pans to the left, and a store appears in the scene, with a row of banners hanging in front of its entrance. The storefront has red advertising stickers. A silver car enters from the left side of the scene and exits from the right side. At the end of the video, a straight road appears in the scene, with a sign on the right side of the road reading \"大型車進入禁止\". A black car is ahead.", "events": [ { "event": "At the beginning of the video, the camera focuses on an intersection. There is a pedestrian and a cyclist waiting to cross at the zebra crossing on the left side of the traffic lights at the intersection. A few cars are queuing on the adjacent lane waiting to pass. A building on the left side of the scene has the sign \"昭和自動車\", and there are some parked vehicles in front of the building.", "visual_elements": [ { "content": "The camera focuses on an intersection.", "type": "camera", "weight": 3 }, { "content": "On the left side of the traffic lights at the intersection, a pedestrian and a cyclist wait to cross at the zebra crossing.", "type": "attribute", "weight": 2 }, { "content": "Several cars queue on the adjacent lane waiting to pass.", "type": "attribute", "weight": 2 }, { "content": "A building on the left side of the scene has the sign \"昭和自動車\".", "type": "attribute", "weight": 2 }, { "content": "There are some parked vehicles in front of the building.", "type": "attribute", "weight": 1 } ] }, { "event": "The traffic lights turn green, and the camera starts moving forward. After passing the intersection, a row of vehicles are parked on the right lane. A building on the left side of the road has the sign \"住友フォークリフト\", and a white minivan is parked in front of the building. A person rides a bicycle along the road in front of the building.", "visual_elements": [ { "content": "The traffic lights turn green.", "type": "action", "weight": 2 }, { "content": "The camera starts to move forward.", "type": "camera", "weight": 3 }, { "content": "After passing the intersection, a row of vehicles are parked on the right lane.", "type": "attribute", "weight": 2 }, { "content": "A building on the left side of the road has the sign \"住友フォークリフト\".", "type": "attribute", "weight": 1 }, { "content": "Next to the road, a white minivan is parked in front of the building.", "type": "attribute", "weight": 1 }, { "content": "A person rides a bicycle along the road in front of the building.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward, and a silver car is seen driving ahead. A lane divider area appears in the middle of the road. Cars have been driving continuously on the right lane. The buildings on both sides of the road gradually become sparse, and there are some trees and utility poles on the right. The silver car slows down and arrives at another intersection. There is a huge red billboard on the building on the left side of the intersection. 
After two cars have successively completed right turns on the opposite side of the road, the silver car continues to drive straight.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "There is a silver car driving ahead.", "type": "action", "weight": 3 }, { "content": "A lane divider area appears in the middle of the road.", "type": "attribute", "weight": 1 }, { "content": "Cars drive continuously on the right lane.", "type": "action", "weight": 2 }, { "content": "The buildings on both sides of the road gradually become sparse.", "type": "scene", "weight": 1 }, { "content": "The silver car slows down and arrives at another intersection.", "type": "action", "weight": 2 }, { "content": "There is a huge red billboard on the building on the left side of the intersection.", "type": "scene", "weight": 2 }, { "content": "After two cars successively complete right turns on the opposite side of the road, the silver car continues to drive straight.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans to the left, and a store appears in the scene, with a row of banners hanging in front of its entrance. The storefront has red advertising stickers. A silver car enters from the left side of the scene and exits from the right side. At the end of the video, a straight road appears in the scene, with a sign on the right side of the road reading \"大型車進入禁止\". A black car is ahead.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "A store appears in the scene.", "type": "attribute", "weight": 2 }, { "content": "A row of banners hang in front of the store's entrance.", "type": "attribute", "weight": 1 }, { "content": "The storefront has red advertising stickers.", "type": "attribute", "weight": 2 }, { "content": "A silver car enters from the left side of the scene and exits from the right side.", "type": "action", "weight": 2 }, { "content": "At the end of the video, a straight road appears in the scene.", "type": "attribute", "weight": 2 }, { "content": "There is a sign on the right side of the road reading \"大型車進入禁止\".", "type": "attribute", "weight": 2 }, { "content": "A black car is ahead on the road.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 27 }, { "index": "TUNA_0957", "video_path": "CoVLA/2022-07-21--15-11-45--44_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "As the video begins, the camera focuses on a narrow street, with residential areas on both sides. There are clear traffic signs and markings on the street, with white arrows and text on the ground. The houses on the left side of the street have brick walls, while those on the right have wooden fences and green plants.\nThe camera pans forward, passing through an intersection, and the road ahead is an uphill stretch. In front of a house on the right side of the road, a black car is parked, and on the left, a short stone wall is covered with green plants. Ahead of the camera is a T-junction, and on the left side in front of the junction, a person wearing a helmet is riding a motorcycle with a red trunk.\nThe camera pans to the left, revealing gray and blue exterior walls of a house at the corner, with black fences installed. 
After the camera stops panning and moves forward, a straight road appears in the scene, with a bicycle parked in front of a red brick wall on the right side of the road. In front of a house on the left side of the road, a white car is parked. At the end of the street, a T-junction appears. In the green belt at the junction, there is a yellow traffic sign, with a black arrow pointing to the left.\nThe camera slowly pans to the left, revealing sparse flowers and plants in the green belt. A person dressed in a floral shirt and gray pants enters the scene, bending down to work in the green belt. The person then leaves the scene. A row of neatly arranged houses appear in the scene.", "events": [ { "event": "As the video begins, the camera focuses on a narrow street, with residential areas on both sides. There are clear traffic signs and markings on the street, with white arrows and text on the ground. The houses on the left side of the street have brick walls, while those on the right have wooden fences and green plants.", "visual_elements": [ { "content": "The camera focuses on a narrow street.", "type": "camera", "weight": 2 }, { "content": "The streets are lined with residential areas.", "type": "scene", "weight": 2 }, { "content": "There are clear traffic signs and markings on the street.", "type": "attribute", "weight": 2 }, { "content": "The houses on the left side of the street have brick walls.", "type": "attribute", "weight": 2 }, { "content": "The houses on the right have wooden fences and green plants.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans forward, passing through an intersection, and the road ahead is an uphill stretch. In front of a house on the right side of the road, a black car is parked, and on the left, a short stone wall is covered with green plants. Ahead of the camera is a T-junction, and on the left side in front of the junction, a person wearing a helmet is riding a motorcycle with a red trunk.", "visual_elements": [ { "content": "The camera pans forward.", "type": "camera", "weight": 3 }, { "content": "The scene passes through an intersection.", "type": "attribute", "weight": 2 }, { "content": "The road ahead is an uphill stretch.", "type": "attribute", "weight": 2 }, { "content": "In front of a house on the right side of the road, a black car is parked.", "type": "scene", "weight": 1 }, { "content": "A short stone wall on the left is covered with green plants.", "type": "scene", "weight": 1 }, { "content": "Ahead of the camera is a T-junction.", "type": "attribute", "weight": 3 }, { "content": "On the left side in front of the junction, a person wearing a helmet is riding a motorcycle.", "type": "attribute", "weight": 2 }, { "content": "The motorcycle's trunk is red.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans to the left, revealing gray and blue exterior walls of a house at the corner, with black fences installed. After the camera stops panning and moves forward, a straight road appears in the scene, with a bicycle parked in front of a red brick wall on the right side of the road. In front of a house on the left side of the road, a white car is parked. At the end of the street, a T-junction appears. 
In the green belt at the junction, there is a yellow traffic sign, with a black arrow pointing to the left.", "visual_elements": [ { "content": "The camera pans to the left.", "type": "camera", "weight": 3 }, { "content": "At the corner, the gray and blue exterior walls of a house have black fences installed.", "type": "scene", "weight": 3 }, { "content": "The camera stops panning and moves forward.", "type": "camera", "weight": 3 }, { "content": "A straight road appears in the scene.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the road, a bicycle is parked in front of a red brick wall.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road, a white car is parked in front of a house.", "type": "attribute", "weight": 2 }, { "content": "At the end of the street, a T-junction appears.", "type": "attribute", "weight": 3 }, { "content": "In the green belt at the T-junction, there is a yellow traffic sign.", "type": "attribute", "weight": 2 }, { "content": "A black arrow on the sign points to the left.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera slowly pans to the left, revealing sparse flowers and plants in the green belt. A person dressed in a floral shirt and gray pants enters the scene, bending down to work in the green belt. The person then leaves the scene. A row of neatly arranged houses appear in the scene.", "visual_elements": [ { "content": "The camera slowly pans to the left.", "type": "camera", "weight": 3 }, { "content": "Sparse flowers and plants are distributed in the green belt.", "type": "scene", "weight": 2 }, { "content": "A person dressed in a floral shirt and gray pants enters the scene.", "type": "attribute", "weight": 2 }, { "content": "The person is bending down and busy in the green belt.", "type": "action", "weight": 2 }, { "content": "The person leaves the scene.", "type": "attribute", "weight": 1 }, { "content": "A row of neatly arranged houses appear in the scene.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 28 }, { "index": "TUNA_0958", "video_path": "CoVLA/2022-07-25--10-16-29--124_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a red-brick building, whose bottom section's exterior wall is composed of grayish-white marble. In front of the building, there are green plants and bushes. The camera slowly pans to the left, revealing an arched door on the red-brick building. Subsequently, the camera continues to pan left, capturing a man in a white shirt and black pants walking on the sidewalk on the right side of the road, with a black backpack on his back. More pedestrians walk along the sidewalk ahead of the man with backpack.\nAfter the camera stops panning and moves forward, the scene shows a street with a row of black guardrails on the right side. There are lush green plants in front of the buildings on both sides of the street. On the left side of the street, a woman in uniform is walking towards the camera. Behind her, there is a red vending machine on the roadside. Pedestrians constantly come and go on both sides of the street.\nAs the camera continues to move forward, an advertisement board appears on the left side of the street. 
Behind the board, there is a 7-Eleven convenience store on the first floor of a building, with parking lines painted on the empty space in front of the store. A grayish-black SUV is parked in the parking area. On the left side of the road, there is a pedestrian wearing a white short-sleeved shirt and black pants, holding a black bag in his hand, and walking forwards with his back to the camera.\nThe camera swings left and right as it moves forward a short distance before stopping. On the right side of the opposite road, there is a red fire hydrant sign, with a red car parked under it. A white van faces the camera, turning on its left turn signal and bypassing the red car from behind to drive forward. The background features modern building complexes.", "events": [ { "event": "At the beginning of the video, the camera focuses on a red-brick building, whose bottom section's exterior wall is composed of grayish-white marble. In front of the building, there are green plants and bushes. The camera slowly pans to the left, revealing an arched door on the red-brick building. Subsequently, the camera continues to pan left, capturing a man in a white shirt and black pants walking on the sidewalk on the right side of the road, with a black backpack on his back. More pedestrians walk along the sidewalk ahead of the man with backpack.", "visual_elements": [ { "content": "The camera focuses on a red-brick building.", "type": "camera", "weight": 3 }, { "content": "The exterior wall of the building's bottom section is composed of grayish-white marble.", "type": "attribute", "weight": 2 }, { "content": "There are some green plants and bushes in front of the building.", "type": "scene", "weight": 2 }, { "content": "The camera slowly pans to the left.", "type": "camera", "weight": 3 }, { "content": "The red-brick building has an arched door.", "type": "attribute", "weight": 1 }, { "content": "The camera continues to pan to the left.", "type": "camera", "weight": 3 }, { "content": "On the sidewalk on the right side of the road, there is a man wearing a white shirt and black pants.", "type": "attribute", "weight": 2 }, { "content": "The man walks on the sidewalk with a black backpack on his back.", "type": "action", "weight": 2 }, { "content": "In front of the backpack-carrying man, there are more pedestrians walking along the sidewalk.", "type": "action", "weight": 2 } ] }, { "event": "After the camera stops panning and moves forward, the scene shows a street with a row of black guardrails on the right side. There are lush green plants in front of the buildings on both sides of the street. On the left side of the street, a woman in uniform is walking towards the camera. Behind her, there is a red vending machine on the roadside. 
Pedestrians constantly come and go on both sides of the street.", "visual_elements": [ { "content": "The camera stops panning and then moves forward.", "type": "camera", "weight": 3 }, { "content": "The scene shows a street.", "type": "attribute", "weight": 2 }, { "content": "There is a row of black guardrails on the right side of the street.", "type": "scene", "weight": 2 }, { "content": "There are lush green plants in front of the buildings on both sides of the street.", "type": "scene", "weight": 1 }, { "content": "On the left side of the street, a woman in uniform is walking towards the camera.", "type": "action", "weight": 2 }, { "content": "Behind the woman, there is a red vending machine on the roadside.", "type": "attribute", "weight": 2 }, { "content": "Pedestrians constantly come and go on both sides of the street.", "type": "scene", "weight": 2 } ] }, { "event": "As the camera continues to move forward, an advertisement board appears on the left side of the street. Behind the board, there is a 7-Eleven convenience store on the first floor of a building, with parking lines painted on the empty space in front of the store. A grayish-black SUV is parked in the parking area. On the left side of the road, there is a pedestrian wearing a white short-sleeved shirt and black pants, holding a black bag in his hand, and walking forwards with his back to the camera.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "A billboard appears on the left side of the street.", "type": "scene", "weight": 2 }, { "content": "Behind the billboard, there is a 7-Eleven convenience store on the first floor of a building.", "type": "scene", "weight": 1 }, { "content": "There are parking lines painted on the empty space in front of the convenience store.", "type": "attribute", "weight": 2 }, { "content": "A grayish-black SUV is parked in the parking area.", "type": "scene", "weight": 2 }, { "content": "On the left side of the road, there is a pedestrian wearing a white short-sleeved shirt and black pants.", "type": "attribute", "weight": 2 }, { "content": "The pedestrian is holding a black bag in his hand.", "type": "attribute", "weight": 2 }, { "content": "The pedestrian is walking forward with his back to the camera.", "type": "action", "weight": 2 } ] }, { "event": "The camera swings left and right as it moves forward a short distance before stopping. On the right side of the opposite road, there is a red fire hydrant sign, with a red car parked under it. A white van faces the camera, turning on its left turn signal and bypassing the red car from behind to drive forward. 
The background features modern building complexes.", "visual_elements": [ { "content": "The camera swings left and right as it moves forward a short distance before stopping.", "type": "camera", "weight": 3 }, { "content": "On the right side of the opposite road, there is a red fire hydrant sign.", "type": "attribute", "weight": 1 }, { "content": "A red car is parked under the sign.", "type": "attribute", "weight": 2 }, { "content": "A white van is facing the camera.", "type": "attribute", "weight": 2 }, { "content": "The van turns on its left turn signal and bypasses the red car from behind to drive forward.", "type": "action", "weight": 3 }, { "content": "The background features modern building complexes.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 30 }, { "index": "TUNA_0959", "video_path": "CoVLA/2022-07-25--10-16-29--134_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "As the video begins, the camera focuses on a wide urban road. The weather is clear, with some white clouds in the sky. There are lush trees on both sides of the road. On the right lane, there is a black car facing the camera. On the left lane in front of the camera, there is a large white bus.\nThe camera pans forward, and the word \"Kusakabe\" on the bus is clearly visible. There is another similar bus in front of it. Cars and motorcycles are constantly passing on the right lane, and there are many pedestrians on the sidewalk on the right side of the road.\nThe camera continues to move forward, and both buses on the left lane leave the scene. The traffic light at the upcoming intersection shows green, and there is a red building on the right side of the intersection. There are some red cone roadblocks on a section of the road on the right. On the left lane in front of the intersection, there is a yellow engineering vehicle, and in front of it, there is a white bus.\nThe camera continues to move forward. The bus on the left side of the road drives towards its right front and then stops in the middle of the road. The camera stops moving. A man wearing a white shirt and gray suit pants is following behind the bus. He gestures to the camera to stop, and then waves at the bus driver to signal. The bus reverses direction and backs up to the left side of the scene. There are blue dedicated lanes on both sides of the road, and several cars are parked at intervals on the dedicated lane on the right side of the road.", "events": [ { "event": "As the video begins, the camera focuses on a wide urban road. The weather is clear, with some white clouds in the sky. There are lush trees on both sides of the road. On the right lane, there is a black car facing the camera. 
On the left lane in front of the camera, there is a large white bus.", "visual_elements": [ { "content": "The camera focuses on a wide urban road.", "type": "camera", "weight": 3 }, { "content": "The weather is clear, with some white clouds in the sky.", "type": "scene", "weight": 2 }, { "content": "There are lush trees on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "There is a black car facing the camera on the right lane.", "type": "attribute", "weight": 2 }, { "content": "On the left lane in front of the camera, there is a large white bus.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans forward, and the word \"Kusakabe\" on the bus is clearly visible. There is another similar bus in front of it. Cars and motorcycles are constantly passing on the right lane, and there are many pedestrians on the sidewalk on the right side of the road.", "visual_elements": [ { "content": "The camera pans forward.", "type": "camera", "weight": 3 }, { "content": "The word \"Kusakabe\" on the bus is clearly visible.", "type": "attribute", "weight": 2 }, { "content": "There is another similar bus in front of it.", "type": "attribute", "weight": 2 }, { "content": "Cars and motorcycles are constantly passing on the right lane.", "type": "action", "weight": 2 }, { "content": "There are many pedestrians on the sidewalk on the right side of the road.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move forward, and both buses on the left lane leave the scene. The traffic light at the upcoming intersection shows green, and there is a red building on the right side of the intersection. There are some red cone roadblocks on a section of the road on the right. On the left lane in front of the intersection, there is a yellow engineering vehicle, and in front of it, there is a white bus.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "Both buses on the left lane leave the scene.", "type": "attribute", "weight": 1 }, { "content": "The traffic light at the upcoming intersection shows green.", "type": "attribute", "weight": 2 }, { "content": "There is a red building on the right side of the intersection.", "type": "attribute", "weight": 1 }, { "content": "There are some red cone roadblocks on a section of the road on the right.", "type": "attribute", "weight": 1 }, { "content": "On the left lane in front of the intersection, there is a yellow engineering vehicle.", "type": "attribute", "weight": 2 }, { "content": "In front of the engineering vehicle, there is a white bus.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward. The bus on the left side of the road drives towards its right front and then stops in the middle of the road. The camera stops moving. A man wearing a white shirt and gray suit pants is following behind the bus. He gestures to the camera to stop, and then waves at the bus driver to signal. The bus reverses direction and backs up to the left side of the scene. 
There are blue dedicated lanes on both sides of the road, and several cars are parked at intervals on the dedicated lane on the right side of the road.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The bus on the left side of the road drives towards its right front.", "type": "action", "weight": 3 }, { "content": "The white bus stops in the middle of the road.", "type": "attribute", "weight": 3 }, { "content": "The camera stops moving.", "type": "camera", "weight": 3 }, { "content": "A man wearing a white shirt and gray suit pants follows behind the bus.", "type": "attribute", "weight": 3 }, { "content": "The man gestures to the camera to stop.", "type": "action", "weight": 2 }, { "content": "The man waves at the bus driver to signal.", "type": "action", "weight": 2 }, { "content": "The bus reverses direction and backs up to the left side of the scene.", "type": "action", "weight": 3 }, { "content": "There are blue dedicated lanes on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "Several cars are parked at intervals on the dedicated lane on the right side of the road.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 27 }, { "index": "TUNA_0960", "video_path": "CoVLA/2022-07-25--10-16-29--58_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "As the video begins, the camera is fixed inside a moving car, facing the road ahead. In the center of the scene is a white car with a blurred license plate. To the left front of the white car is a large truck, and the front of another car enters from the left side of the scene. There are white barriers on the right side of the road, and green vegetation surrounds the road on both sides.\nThe camera pans forward, capturing the white car and truck driving ahead. Above the road, there is a metal gantry spanning the road, equipped with multiple cameras and sensors. The vegetation on both sides of the road gradually becomes sparse, and some low-rise buildings appear behind the green vegetation on the right side of the road. Streetlights are neatly arranged on both sides of the road.\nThe camera continues to move forward. The truck ahead turns on its left turn signal and then disappears from the scene. A section of black guardrails appears on the right side of the road. On the right side of the guardrails is the opposing lane, where vehicles are driving.\nThe camera continues to move forward, and the vegetation on both sides of the road gives way to elevated metal grids. The black guardrails disappear from the scene, and a white dividing line is painted in the middle of the two lanes. The camera follows the vehicles ahead as they make a right turn and then continues to move forward. Ahead on the road, there is another metal gantry spanning the road, with blue and yellow signboards on it.", "events": [ { "event": "As the video begins, the camera is fixed inside a moving car, facing the road ahead. In the center of the scene is a white car with a blurred license plate. To the left front of the white car is a large truck, and the front of another car enters from the left side of the scene. 
There are white barriers on the right side of the road, and green vegetation surrounds the road on both sides.", "visual_elements": [ { "content": "The camera is fixed inside a moving car.", "type": "camera", "weight": 3 }, { "content": "The camera faces the road ahead.", "type": "camera", "weight": 3 }, { "content": "In the center of the scene is a white car.", "type": "attribute", "weight": 2 }, { "content": "The license plate of the white car is blurred and not visible.", "type": "attribute", "weight": 1 }, { "content": "To the left front of the white car is a large truck.", "type": "attribute", "weight": 2 }, { "content": "The front end of another car enters the scene on the left side.", "type": "attribute", "weight": 1 }, { "content": "There are white barriers on the right side of the road.", "type": "attribute", "weight": 2 }, { "content": "Green vegetation surrounds both sides of the road.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans forward, capturing the white car and truck driving ahead. Above the road, there is a metal gantry spanning the road, equipped with multiple cameras and sensors. The vegetation on both sides of the road gradually becomes sparse, and some low-rise buildings appear behind the green vegetation on the right side of the road. Streetlights are neatly arranged on both sides of the road.", "visual_elements": [ { "content": "The camera pans forward.", "type": "camera", "weight": 3 }, { "content": "The white car and the truck are driving ahead.", "type": "action", "weight": 3 }, { "content": "There is a metal gantry spanning across the road above.", "type": "attribute", "weight": 2 }, { "content": "Multiple cameras and sensors are installed on the gantry.", "type": "attribute", "weight": 2 }, { "content": "The vegetation on both sides of the road gradually becomes sparse.", "type": "scene", "weight": 1 }, { "content": "Some low-rise buildings appear behind the green vegetation on the right side of the road.", "type": "scene", "weight": 1 }, { "content": "Streetlights are neatly arranged on both sides of the road.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move forward. The truck ahead turns on its left turn signal and then disappears from the scene. A section of black guardrails appears on the right side of the road. On the right side of the guardrails is the opposing lane, where vehicles are driving.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The truck ahead turns on its left turn signal.", "type": "attribute", "weight": 1 }, { "content": "Then the truck disappears from the scene.", "type": "attribute", "weight": 2 }, { "content": "A section of black guardrails appears on the right side of the road.", "type": "attribute", "weight": 2 }, { "content": "To the right of the guardrails is the opposing lane.", "type": "attribute", "weight": 2 }, { "content": "Vehicles are driving on the opposing lane.", "type": "action", "weight": 2 } ] }, { "event": "The camera continues to move forward, and the vegetation on both sides of the road gives way to elevated metal grids. The black guardrails disappear from the scene, and a white dividing line is painted in the middle of the two lanes. The camera follows the vehicles ahead as they make a right turn and then continues to move forward. 
Ahead on the road, there is another metal gantry spanning the road, with blue and yellow signboards on it.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The vegetation on both sides of the road gives way to elevated metal grids.", "type": "attribute", "weight": 2 }, { "content": "The black guardrails disappear from the scene.", "type": "attribute", "weight": 2 }, { "content": "White dividing lines are painted in the middle of the two lanes.", "type": "attribute", "weight": 1 }, { "content": "The camera follows the vehicles ahead and passes a right turn.", "type": "camera", "weight": 3 }, { "content": "Then the camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "Another metal gantry spanning across the road appears ahead.", "type": "attribute", "weight": 2 }, { "content": "There are blue and yellow signboards on the gantry.", "type": "attribute", "weight": 2 }, { "content": "The camera stops as the vehicles ahead just pass the metal gantry.", "type": "camera", "weight": 3 }, { "content": "The grids on both sides of the road give way to guardrails.", "type": "attribute", "weight": 2 }, { "content": "A person in a work uniform stands beside a red cone roadblock on the left side of the scene.", "type": "scene", "weight": 1 }, { "content": "There is a green hollowed-out building on the right side of the scene.", "type": "scene", "weight": 1 }, { "content": "A forest of tall buildings stands in the background.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 34 }, { "index": "TUNA_0961", "video_path": "CoVLA/2022-07-25--13-35-05--31_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "The video begins with the camera focusing on a black Mercedes-Benz ML 350 4MATIC SUV. To the right of the Mercedes is a metal fence with weeds growing underneath it. To the right of the fence is the opposing lane, where vehicles are continuously driving. Ahead is a traffic intersection with a yellow traffic light. In the background, tall buildings and a pedestrian overpass can be seen, with the words \"In Front of Qingshan College\" written on it. The weather is sunny, with some white clouds in the sky.\nSubsequently, the traffic light changes from yellow to red, and the brake lights of the black Mercedes SUV illuminate as it begins to move forward slowly. Below the red light on the traffic signal ahead, the right-turn arrow is green, and the camera starts to move forward.\nThe Mercedes turns right at the intersection, and the camera pans to the right along with the movement of the Mercedes. The Mercedes ahead enters a narrower road. A yellow solid line is painted in the middle of the road. A long line of vehicles is waiting to pass on the right lane, and a speed limit sign of 40 is painted on the ground of the left lane. There is a row of trees and shrubs on the right side of the street.\nThe camera continues to move forward, and ahead is another traffic intersection with a red traffic light. The Mercedes ahead begins to slow down. There is already a car stopped in front of the Mercedes. The camera pans to the right front, shifting the focus to the adjacent lane on the right of the Mercedes. 
Ahead is a white van, and on the right opposing lane, cars and motorcycles are lined up waiting to move forward.", "events": [ { "event": "The video begins with the camera focusing on a black Mercedes-Benz ML 350 4MATIC SUV. To the right of the Mercedes is a metal fence with weeds growing underneath it. To the right of the fence is the opposing lane, where vehicles are continuously driving. Ahead is a traffic intersection with a yellow traffic light. In the background, tall buildings and a pedestrian overpass can be seen, with the words \"In Front of Qingshan College\" written on it. The weather is sunny, with some white clouds in the sky.", "visual_elements": [ { "content": "The camera focuses on a black Mercedes-Benz ML 350 4MATIC SUV.", "type": "camera", "weight": 3 }, { "content": "There is a metal fence on the right side of the Mercedes.", "type": "attribute", "weight": 2 }, { "content": "Weeds are growing under the fence.", "type": "attribute", "weight": 1 }, { "content": "To the right of the fence is the opposing lane.", "type": "attribute", "weight": 2 }, { "content": "Vehicles are continuously driving on the opposing lane.", "type": "attribute", "weight": 2 }, { "content": "Ahead is a traffic intersection with traffic lights.", "type": "attribute", "weight": 2 }, { "content": "The traffic signal light is yellow.", "type": "attribute", "weight": 1 }, { "content": "In the background, tall buildings and a pedestrian overpass can be seen.", "type": "scene", "weight": 2 }, { "content": "The pedestrian overpass has the words \"In Front of Qingshan College\" written on it.", "type": "attribute", "weight": 2 }, { "content": "The weather is sunny, with some white clouds in the sky.", "type": "scene", "weight": 2 } ] }, { "event": "Subsequently, the traffic light changes from yellow to red, and the brake lights of the black Mercedes SUV illuminate as it begins to move forward slowly. Below the red light on the traffic signal ahead, the right-turn arrow is green, and the camera starts to move forward.", "visual_elements": [ { "content": "The traffic signal light changes from yellow to red.", "type": "attribute", "weight": 2 }, { "content": "The brake lights of the black Mercedes-Benz SUV illuminate.", "type": "attribute", "weight": 2 }, { "content": "The Mercedes-Benz begins to move forward slowly.", "type": "action", "weight": 3 }, { "content": "Below the red light on the traffic signal ahead, the right-turn arrow is green.", "type": "attribute", "weight": 2 }, { "content": "The camera starts to move forward.", "type": "camera", "weight": 3 } ] }, { "event": "The Mercedes turns right at the intersection, and the camera pans to the right along with the movement of the Mercedes. The Mercedes ahead enters a narrower road. A yellow solid line is painted in the middle of the road. A long line of vehicles is waiting to pass on the right lane, and a speed limit sign of 40 is painted on the ground of the left lane. 
There is a row of trees and shrubs on the right side of the street.", "visual_elements": [ { "content": "The Mercedes-Benz turns right at the intersection.", "type": "action", "weight": 3 }, { "content": "The camera also pans to the right along with the movement of the Mercedes-Benz.", "type": "camera", "weight": 3 }, { "content": "The Mercedes-Benz ahead enters a narrower road.", "type": "attribute", "weight": 2 }, { "content": "A yellow solid line is painted in the middle of the road.", "type": "attribute", "weight": 2 }, { "content": "A long line of vehicles is waiting to pass on the right lane.", "type": "attribute", "weight": 2 }, { "content": "A speed limit sign of 40 is painted on the ground of the left lane.", "type": "attribute", "weight": 1 }, { "content": "There is a row of trees and shrubs on the right side of the street.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move forward, and ahead is another traffic intersection with a red traffic light. The Mercedes ahead begins to slow down. There is already a car stopped in front of the Mercedes. The camera pans to the right front, shifting the focus to the adjacent lane on the right of the Mercedes. Ahead is a white van, and on the right opposing lane, cars and motorcycles are lined up waiting to move forward.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "Ahead is another traffic intersection with traffic lights.", "type": "attribute", "weight": 2 }, { "content": "The traffic signal light is red.", "type": "attribute", "weight": 2 }, { "content": "The Mercedes-Benz ahead begins to slow down.", "type": "action", "weight": 3 }, { "content": "There is already a car stopped in front of the Mercedes-Benz.", "type": "attribute", "weight": 2 }, { "content": "The camera pans to the right front.", "type": "camera", "weight": 3 }, { "content": "The focus shifts to the adjacent lane on the right of the Mercedes-Benz.", "type": "camera", "weight": 3 }, { "content": "There is a white van ahead.", "type": "attribute", "weight": 2 }, { "content": "There are cars and motorcycles queuing to move forward on the right opposing lane.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 31 }, { "index": "TUNA_0962", "video_path": "CoVLA/2022-08-01--13-53-21--53_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a city street, with a white Toyota Crown car ahead. The license plate is blurred. Several cars are queuing on the adjacent lane waiting to pass. There are various billboards and signs on the buildings lining the street. There is a speed limit sign of 40 kilometers per hour on the streetlight on the left side of the road ahead. A woman dressed in a gray uniform and wearing a mask walks on the sidewalk on the left, holding a dark umbrella. She walks towards the direction of the camera and finally walks out of the scene. The traffic light at the intersection ahead is red.\nThe traffic light at the intersection ahead changes from red to green, and the Toyota car starts to move forward slowly. The vehicles on the next lane also start to move ahead. The camera begins to pan forward. There is a person on the left sidewalk wearing a mask, a yellow top, and dark shorts. This person is sitting on a bicycle with one foot touching the ground. 
In front of this person is a man dressed in a dark suit and wearing a mask, carrying a black bag and standing at the zebra crossing waiting to cross.\nThe camera continues to move forward. There is a convenience store on the first floor of a building on the left side of the intersection opposite. The storefront of the convenience store has the logo of \"LAWSON\". There are people walking and others standing and looking down at something in front of the convenience store. The lane next to the Toyota car has continuous oncoming traffic. The shot stops when the Toyota car is in the middle of the intersection. There is a bus with the word \"KASHIBA\" on its body on the right side of the scene, moving in the opposite direction to the Toyota car. There are brightly colored billboards on the high-rise buildings at the end of the street in the background.", "events": [ { "event": "At the beginning of the video, the camera focuses on a city street, with a white Toyota Crown car ahead. The license plate is blurred. Several cars are queuing on the adjacent lane waiting to pass. There are various billboards and signs on the buildings lining the street. There is a speed limit sign of 40 kilometers per hour on the streetlight on the left side of the road ahead. A woman dressed in a gray uniform and wearing a mask walks on the sidewalk on the left, holding a dark umbrella. She walks towards the camera and finally walks out of the scene. The traffic light at the intersection ahead is red.", "visual_elements": [ { "content": "The camera focuses on a city street.", "type": "camera", "weight": 3 }, { "content": "Ahead is a white Toyota Crown car.", "type": "attribute", "weight": 2 }, { "content": "The license plate of the Toyota car is blurred.", "type": "attribute", "weight": 2 }, { "content": "Several cars are queuing on the adjacent lane waiting to pass.", "type": "attribute", "weight": 2 }, { "content": "There are various billboards and signs on the buildings lining the street.", "type": "scene", "weight": 2 }, { "content": "There is a speed limit sign of 40 kilometers per hour on the streetlight on the left side of the road ahead.", "type": "attribute", "weight": 2 }, { "content": "On the sidewalk on the left, there is a woman dressed in a gray uniform and wearing a mask, holding a dark umbrella.", "type": "attribute", "weight": 2 }, { "content": "The woman walks on the sidewalk towards the camera and finally walks out of the scene.", "type": "action", "weight": 2 }, { "content": "The traffic light at the intersection ahead is red.", "type": "attribute", "weight": 2 } ] }, { "event": "The traffic light at the intersection ahead changes from red to green, and the Toyota car starts to move forward slowly. The vehicles on the next lane also start to move ahead. The camera begins to pan forward. There is a person on the left sidewalk wearing a mask, a yellow top, and dark shorts. This person is sitting on a bicycle with one foot touching the ground. 
In front of this person is a man dressed in a dark suit and wearing a mask, carrying a black bag and standing at the zebra crossing waiting to cross.", "visual_elements": [ { "content": "The traffic light at the intersection ahead changes from red to green.", "type": "attribute", "weight": 2 }, { "content": "The Toyota car starts to move forward slowly.", "type": "action", "weight": 3 }, { "content": "The vehicles on the next lane also start to move ahead.", "type": "action", "weight": 3 }, { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "There is a person on the left sidewalk wearing a mask, a yellow top, and dark shorts.", "type": "scene", "weight": 2 }, { "content": "This person is sitting on a bicycle with one foot touching the ground.", "type": "attribute", "weight": 2 }, { "content": "In front of this person is a man wearing a mask and dressed in a dark suit, carrying a black bag.", "type": "scene", "weight": 2 }, { "content": "This man stands at the zebra crossing waiting to cross.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward. There is a convenience store on the first floor of a building on the left side of the intersection opposite. The storefront of the convenience store has the logo of \"LAWSON\". There are people walking and others standing and looking down at something in front of the convenience store. The lane next to the Toyota car has continuous oncoming traffic. The shot stops when the Toyota car is in the middle of the intersection. There is a bus with the word \"KASHIBA\" on its body on the right side of the scene, moving in the opposite direction to the Toyota car. There are brightly colored billboards on the high-rise buildings at the end of the street in the background.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "There is a convenience store on the first floor of a building on the left side of the intersection opposite.", "type": "scene", "weight": 2 }, { "content": "The storefront of the convenience store has the logo of \"LAWSON\".", "type": "attribute", "weight": 2 }, { "content": "In front of the convenience store, there are people walking and others standing and looking down at something.", "type": "scene", "weight": 2 }, { "content": "The lane next to the Toyota car has continuous oncoming vehicles.", "type": "attribute", "weight": 2 }, { "content": "The shot stops when the Toyota car is in the middle of the intersection.", "type": "camera", "weight": 3 }, { "content": "There is a bus with the word \"KASHIBA\" on its body on the right side of the scene.", "type": "attribute", "weight": 2 }, { "content": "The bus drives in the opposite direction to the Toyota car.", "type": "attribute", "weight": 2 }, { "content": "There are brightly colored billboards on the high-rise buildings at the end of the street in the background.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 26 }, { "index": "TUNA_0963", "video_path": "CoVLA/2022-08-09--13-20-00--84_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera faces the road ahead. There are white zebra crossings on the road ahead, with traffic lights above the zebra crossings. In front of the zebra crossings, there is a black car. 
To the left of the scene is a viaduct, and the white fence on the right side of the black car forms an isolation belt planted with green grass. To the right of the isolation belt is the opposite lane.\nThe camera starts to move forward. After traveling a short distance, the black car enters a small curve, where the single lane on the road turns into a double lane, and there are also vehicles traveling in the adjacent lane. In an empty space under the viaduct on the left side of the road, three cars are parked neatly.\nThe camera continues to move forward. The viaduct on the left side leaves the scene, and the black car continues to move forward. As the car passes a branching intersection, there is a black building on the left side of the intersection, with advertising posters posted on the building. In the empty space in front of the building, there are several cars and a row of banners.\nThe camera continues to move forward. The black car travels through an intersection where the traffic lights are green. On the sidewalk to the left of the intersection, there is a truck with a panda cartoon image painted on its body. On the left side of the opposite intersection, there is a shop with its name written on the storefront.\nThe camera continues to move forward. The buildings on both sides of the road are neatly arranged, and some vehicles are parked in front of some buildings. Utility poles and wires are lined up along both sides of the road. At the end of the video, yellow billboards and a yellow van are placed in front of the buildings on the left side of the scene.", "events": [ { "event": "At the beginning of the video, the camera faces the road ahead. There are white zebra crossings on the road ahead, with traffic lights above the zebra crossings. In front of the zebra crossings, there is a black car. To the left of the scene is a viaduct, and the white fence on the right side of the black car forms an isolation belt planted with green grass. To the right of the isolation belt is the opposite lane.", "visual_elements": [ { "content": "The camera faces the road ahead.", "type": "camera", "weight": 3 }, { "content": "There are white zebra crossings on the road ahead.", "type": "scene", "weight": 2 }, { "content": "Above the zebra crossings is a traffic light.", "type": "attribute", "weight": 1 }, { "content": "In front of the zebra crossings, there is a black car.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the scene is a viaduct.", "type": "attribute", "weight": 2 }, { "content": "The white fence on the right side of the black car forms an isolation belt.", "type": "attribute", "weight": 2 }, { "content": "The isolation belt is planted with green grass.", "type": "attribute", "weight": 1 }, { "content": "To the right of the isolation belt is the opposite lane.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera starts to move forward. After traveling a short distance, the black car enters a small curve, where the single lane on the road turns into a double lane, and there are also vehicles traveling in the adjacent lane. 
In an empty space under the viaduct on the left side of the road, three cars are parked neatly.", "visual_elements": [ { "content": "The camera starts to move forward.", "type": "camera", "weight": 3 }, { "content": "The black car travels a short distance and then enters a small curve.", "type": "action", "weight": 3 }, { "content": "The single lane on the road turns into a dual lane.", "type": "attribute", "weight": 2 }, { "content": "There are also vehicles driving in the next lane.", "type": "scene", "weight": 2 }, { "content": "In an empty space under the viaduct on the left side of the road, three cars are parked neatly.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward. The viaduct on the left side leaves the scene, and the black car continues to move forward. As the car passes a branching intersection, there is a black building on the left side of the intersection, with advertising posters posted on the building. In the empty space in front of the building, there are several cars and a row of banners.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The viaduct on the left side leaves the scene.", "type": "attribute", "weight": 1 }, { "content": "The black car continues to move forward.", "type": "action", "weight": 2 }, { "content": "The car passes a branching intersection.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the intersection, there is a black building.", "type": "attribute", "weight": 2 }, { "content": "Advertising posters are posted on the building.", "type": "attribute", "weight": 2 }, { "content": "In the empty space in front of the building, there are several cars and a row of banners.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward. The black car travels through an intersection where the traffic lights are green. On the sidewalk to the left of the intersection, there is a truck with a panda cartoon image painted on its body. On the left side of the opposite intersection, there is a shop with its name written on the storefront.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The black car travels through an intersection.", "type": "attribute", "weight": 2 }, { "content": "The traffic light at the intersection is green.", "type": "attribute", "weight": 2 }, { "content": "On the sidewalk to the left of the intersection, there is a truck with a panda cartoon image painted on its body.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the opposite intersection, there is a shop.", "type": "attribute", "weight": 2 }, { "content": "The name of the shop is written on the storefront.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera continues to move forward. The buildings on both sides of the road are neatly arranged, and some vehicles are parked in front of some buildings. Utility poles and wires are lined up along both sides of the road. 
At the end of the video, yellow billboards and a yellow van are placed in front of the buildings on the left side of the scene.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The buildings on both sides of the road are neatly arranged.", "type": "scene", "weight": 2 }, { "content": "Some vehicles are parked in front of some buildings.", "type": "scene", "weight": 1 }, { "content": "Utility poles and wires are lined up along both sides of the road.", "type": "scene", "weight": 2 }, { "content": "At the end of the video, yellow billboards and a yellow van are placed in front of the buildings on the left side of the scene.", "type": "attribute", "weight": 2 } ] } ], "n_events": 5, "n_elements": 31 }, { "index": "TUNA_0964", "video_path": "CoVLA/2022-08-17--15-36-34--77_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a highway with two-way lanes, capturing the scene from inside a car looking forward. In front is a black car. The two lanes of the left lane are lined with neatly arranged vehicles, while there are no visible vehicles on the right lane. To the right of the right lane, an orange gas station can be seen, with white letters \"ENEOS\" on it. Behind the white wall on the left side of the road are green trees.\nThe camera pans to the left front, and the car in front transforms into a white Toyota van. The Toyota van moves forward. The camera continues to move forward as well. Above the road ahead is a blue road sign. Some advertising posters are pasted on the white wall to the left of the road sign. The line of vehicles on the right lane remains stationary.\nThe camera continues to move forward. The brake lights of the Toyota van light up, and it slows down to stop. The vehicles on the right lane also stop to wait for traffic to pass. In the background, vehicles from the opposite lane drive by. The camera stops moving. On the left side of the road is a green wall, behind which is a factory building and a high-voltage tower. The brake lights of the Toyota van go off, and it starts to move forward slowly. The black car next to the Toyota van on the right lane also begins to move forward slowly.", "events": [ { "event": "At the beginning of the video, the camera focuses on a highway with two-way lanes, capturing the scene from inside a car looking forward. In front is a black car. The two lanes of the left lane are lined with neatly arranged vehicles, while there are no visible vehicles on the right lane. To the right of the right lane, an orange gas station can be seen, with white letters \"ENEOS\" on it. 
Behind the white wall on the left side of the road are green trees.", "visual_elements": [ { "content": "The camera is focused on a highway with two-way lanes.", "type": "camera", "weight": 3 }, { "content": "The perspective is captured from inside a car looking forward.", "type": "camera", "weight": 3 }, { "content": "In front is a black car.", "type": "attribute", "weight": 2 }, { "content": "The two lanes of the left lane are lined with neatly arranged vehicles.", "type": "attribute", "weight": 2 }, { "content": "There are no visible vehicles on the right lane.", "type": "attribute", "weight": 1 }, { "content": "An orange gas station can be seen on the right side of the right lane.", "type": "scene", "weight": 2 }, { "content": "The gas station has white letters \"ENEOS\" on it.", "type": "attribute", "weight": 2 }, { "content": "Behind the white wall on the left side of the road are green trees.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans to the left front, and the car in front transforms into a white Toyota van. The Toyota van moves forward. The camera continues to move forward as well. Above the road ahead is a blue road sign. Some advertising posters are pasted on the white wall to the left of the road sign. The line of vehicles on the right lane remains stationary.", "visual_elements": [ { "content": "The camera pans to the left front.", "type": "camera", "weight": 3 }, { "content": "The car in front transforms into a white Toyota van.", "type": "attribute", "weight": 2 }, { "content": "The Toyota van moves forward.", "type": "action", "weight": 3 }, { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "Above the road ahead is a blue road sign.", "type": "attribute", "weight": 2 }, { "content": "Some advertising posters are pasted on the white wall to the left of the road sign.", "type": "scene", "weight": 1 }, { "content": "The line of vehicles on the right lane remains stationary.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward. The brake lights of the Toyota van light up, and it slows down to stop. The vehicles on the right lane also stop to wait for traffic to pass. In the background, vehicles from the opposite lane drive by. The camera stops moving. On the left side of the road is a green wall, behind which is a factory building and a high-voltage tower. The brake lights of the Toyota van go off, and it starts to move forward slowly. 
The black car next to the Toyota van on the right lane also begins to move forward slowly.", "visual_elements": [ { "content": "The camera keeps moving forward.", "type": "camera", "weight": 3 }, { "content": "The brake lights of the Toyota van light up.", "type": "attribute", "weight": 2 }, { "content": "The Toyota van slows down to stop.", "type": "action", "weight": 3 }, { "content": "The vehicles on the right lane also stop to wait for traffic to pass.", "type": "attribute", "weight": 2 }, { "content": "In the background, vehicles from the opposite lane drive by.", "type": "scene", "weight": 1 }, { "content": "The camera stops moving.", "type": "camera", "weight": 3 }, { "content": "On the left side of the road is a green wall.", "type": "scene", "weight": 2 }, { "content": "Behind the wall is a factory building and a high-voltage tower.", "type": "scene", "weight": 2 }, { "content": "The brake lights of the Toyota van go off.", "type": "attribute", "weight": 2 }, { "content": "The Toyota van starts to move forward slowly.", "type": "action", "weight": 2 }, { "content": "The black car next to the Toyota van on the right lane also begins to move forward slowly.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 26 }, { "index": "TUNA_0965", "video_path": "CoVLA/2022-08-26--10-17-35--17_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "The first half of the video shows a stationary shot that focuses on the rear of a white truck parked on the left lane of the road. The rear of the truck has yellow and red reflective strips, and the license plate is blurred out. A blue truck's cab is partially visible on the right side of the scene. In front of the white truck, there is a black car. To the left of the white truck, there is a sidewalk. Next to the sidewalk, there is a section of black fencing, and above the fencing, there is a green belt planted with green trees and grass, with a few trees also adorned with red flowers. Vehicles continuously pass on the opposite lane.\nThe white truck begins to move forward slowly, and the black car in the next lane also starts to move. The blue truck first starts slowly and then accelerates, with its gray cargo compartment also coming into the scene. There are white arrows and the text \"Sudden Pursuit Caution\" on the ground. Behind the black fencing on the left side of the scene, there is an empty area where vehicles are parked. In front of the empty area, there is a wall, and the trees behind the wall are growing very vigorously.", "events": [ { "event": "The first half of the video shows a stationary shot that focuses on the rear of a white truck parked on the left lane of the road. The rear of the truck has yellow and red reflective strips, and the license plate is blurred out. A blue truck's cab is partially visible on the right side of the scene. In front of the white truck, there is a black car. To the left of the white truck, there is a sidewalk. Next to the sidewalk, there is a section of black fencing, and above the fencing, there is a green belt planted with green trees and grass, with a few trees also adorned with red flowers. 
Vehicles continuously pass on the opposite lane.", "visual_elements": [ { "content": "The first half of the video remains stationary.", "type": "camera", "weight": 3 }, { "content": "The camera focuses on the rear of a white truck.", "type": "camera", "weight": 3 }, { "content": "The truck is parked on the left lane of the road.", "type": "attribute", "weight": 3 }, { "content": "The rear of the truck has yellow and red reflective strips.", "type": "attribute", "weight": 2 }, { "content": "The license plate is blurred out.", "type": "attribute", "weight": 2 }, { "content": "The cab of a blue truck is partially visible on the right side of the scene.", "type": "attribute", "weight": 2 }, { "content": "There is a black car in front of the blue truck.", "type": "attribute", "weight": 2 }, { "content": "There is a sidewalk on the left side of the white truck.", "type": "attribute", "weight": 2 }, { "content": "Next to the sidewalk, there is a section of black fencing.", "type": "attribute", "weight": 1 }, { "content": "Inside the fencing, there is a green belt planted with green trees and grass.", "type": "scene", "weight": 2 }, { "content": "A few trees are also adorned with red flowers.", "type": "scene", "weight": 2 }, { "content": "Vehicles continuously pass on the opposite lane.", "type": "scene", "weight": 2 } ] }, { "event": "The white truck begins to move forward slowly, and the black car in the next lane also starts to move. The blue truck first starts slowly and then accelerates, with its gray cargo compartment also coming into the scene. There are white arrows and the text \"Sudden Pursuiot Caution\" on the ground. Behind the black fencing on the left side of the scene, there is an empty area where vehicles are parked. In front of the empty area, there is a wall, and the trees behind the wall are growing very vigorously.", "visual_elements": [ { "content": "The white truck begins to move forward slowly.", "type": "action", "weight": 3 }, { "content": "The black car in the adjacent lane also starts to move.", "type": "action", "weight": 3 }, { "content": "The blue truck first starts slowly and then accelerates.", "type": "action", "weight": 3 }, { "content": "The gray cargo compartment of the blue truck also comes into the scene.", "type": "attribute", "weight": 2 }, { "content": "There are white arrows and the text \"Sudden Pursuit Caution\" on the ground.", "type": "attribute", "weight": 2 }, { "content": "Behind the black fencing on the left side of the scene, there is an empty area where vehicles are parked.", "type": "scene", "weight": 2 }, { "content": "In front of the empty area, there is a wall.", "type": "attribute", "weight": 2 }, { "content": "The trees behind the wall are growing very vigorously.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 20 }, { "index": "TUNA_0966", "video_path": "CoVLA/2022-08-31--14-45-14--75_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a wide highway. There are three lanes in total, with a yellow engineering vehicle traveling in the middle lane. The bidirectional lanes are separated by gray w-beam guardrails, and there is a white truck on the lane to the right of the guardrails. Both sides of the highway have tall sound barriers, with green plants growing both in front of and behind them. 
The sky is clear.\nThe camera keeps moving forward. In the middle of the video, the camera remains behind the engineering vehicle. At this point, a silver car appears on the right side of the scene, traveling in the lane to the right of the engineering vehicle and gradually overtaking it. During this process, the camera pans forward to the right, and the view ahead changes to the silver car. The camera pans forward and gradually approaches the engineering vehicle. The equipment and signboards inside the engineering vehicle are clearly visible.", "events": [ { "event": "At the beginning of the video, the camera focuses on a wide highway. There are three lanes in total, with a yellow engineering vehicle traveling in the middle lane. The bidirectional lanes are separated by gray w-beam guardrails, and there is a white truck on the lane to the right of the guardrails. Both sides of the highway have tall sound barriers, with green plants growing both in front of and behind them. The sky is clear.", "visual_elements": [ { "content": "The camera focuses on a wide highway.", "type": "camera", "weight": 3 }, { "content": "There are three lanes in total in the scene.", "type": "attribute", "weight": 3 }, { "content": "A yellow engineering vehicle is traveling in the middle lane ahead.", "type": "attribute", "weight": 2 }, { "content": "The bidirectional lanes are separated by gray w-beam guardrails.", "type": "attribute", "weight": 2 }, { "content": "There is a white truck on the lane to the right of the guardrails.", "type": "attribute", "weight": 1 }, { "content": "Both sides of the highway have tall sound barriers.", "type": "attribute", "weight": 2 }, { "content": "Green plants are growing both in front of and behind the sound barriers.", "type": "scene", "weight": 2 }, { "content": "The sky is clear.", "type": "scene", "weight": 2 } ] }, { "event": "The camera keeps moving forward. In the middle of the video, the camera remains behind the engineering vehicle. At this point, a silver car appears on the right side of the scene, traveling in the lane to the right of the engineering vehicle and gradually overtaking it. During this process, the camera pans forward to the right, and the view ahead changes to the silver car. The camera pans forward and gradually approaches the engineering vehicle. 
The equipment and signboards inside the engineering vehicle are clearly visible.", "visual_elements": [ { "content": "The camera keeps moving forward.", "type": "camera", "weight": 3 }, { "content": "In the middle of the video, the camera remains behind the engineering vehicle.", "type": "camera", "weight": 3 }, { "content": "A silver car appears on the right side of the scene.", "type": "attribute", "weight": 3 }, { "content": "The car travels in the lane to the right of the engineering vehicle and gradually overtakes it.", "type": "action", "weight": 3 }, { "content": "The camera pans forward to the right.", "type": "camera", "weight": 3 }, { "content": "The view ahead changes to the silver car.", "type": "camera", "weight": 3 }, { "content": "The camera pans forward and gradually approaches the engineering vehicle.", "type": "camera", "weight": 3 }, { "content": "The engineering vehicle is carrying some construction equipment and signboards.", "type": "attribute", "weight": 2 } ] } ], "n_events": 2, "n_elements": 16 }, { "index": "TUNA_0967", "video_path": "CoVLA/2022-09-05--14-57-03--6_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the start of the video, the camera focuses on the rear of a black Toyota VOXY car parked on the left lane of a bidirectional road. The brake lights of the car are on, and the license plate is blurred. Trees and buildings can be seen on both sides of the road in the background. There is a blue circular road sign on the left side ahead, and a red-and-white building on the right side. A silver car passes on the right lane.\nThe camera pans forward a short distance and then stops. The black Toyota VOXY car ahead starts to move. The camera starts moving forward again. A motorcycle passes on the right lane, and there is a person riding a bicycle on the left sidewalk. The traffic light at the intersection ahead is green. The Toyota car turns left with its left turn signal on. A gray van drives straight on the opposite lane.\nThe camera pans to the right, continues panning forward after making a right turn, and a straight road appears in the scene. There is a white building on the left side of the road, with red decorative strips on it. There is a white car ahead, and there are white lane divider lines in the middle of the road. There is a lake-blue truck in the opposite lane, followed by two other cars. There are two people riding bicycles on the sidewalk to the right. Utility poles and wires are lined up along both sides of the road. There is a multi-story building in the back right of the scene.", "events": [ { "event": "At the start of the video, the camera focuses on the rear of a black Toyota VOXY car parked on the left lane of a bidirectional road. The brake lights of the car are on, and the license plate is blurred. Trees and buildings can be seen on both sides of the road in the background. There is a blue circular road sign on the left side ahead, and a red-and-white building on the right side. 
A silver car passes on the right lane.", "visual_elements": [ { "content": "The camera focuses on the rear of a black Toyota VOXY car.", "type": "camera", "weight": 3 }, { "content": "The car is parked on the left lane of a bidirectional road.", "type": "scene", "weight": 3 }, { "content": "The brake lights of the car are on.", "type": "attribute", "weight": 2 }, { "content": "The license plate is blurred.", "type": "attribute", "weight": 2 }, { "content": "Trees and buildings can be seen on both sides of the road in the background.", "type": "scene", "weight": 2 }, { "content": "There is a blue circular road sign on the left side ahead.", "type": "attribute", "weight": 1 }, { "content": "There is a red-and-white building on the right side.", "type": "attribute", "weight": 2 }, { "content": "A silver car passes on the right lane.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans forward a short distance and then stops. The black Toyota VOXY car ahead starts to move. The camera starts moving forward again. A motorcycle passes on the right lane, and there is a person riding a bicycle on the left sidewalk. The traffic light at the intersection ahead is green. The Toyota car turns left with its left turn signal on. A gray van drives straight on the opposite lane.", "visual_elements": [ { "content": "The camera pans forward a short distance and then stops.", "type": "camera", "weight": 3 }, { "content": "The black Toyota VOXY car ahead starts to move.", "type": "action", "weight": 3 }, { "content": "The camera starts moving forward again.", "type": "camera", "weight": 3 }, { "content": "A motorcycle passes on the right lane.", "type": "action", "weight": 2 }, { "content": "There is a person riding a bicycle on the left sidewalk.", "type": "attribute", "weight": 2 }, { "content": "The traffic light at the intersection ahead is green.", "type": "attribute", "weight": 2 }, { "content": "The Toyota car turns left with its left turn signal on.", "type": "action", "weight": 3 }, { "content": "A gray van drives straight on the opposite lane.", "type": "action", "weight": 2 } ] }, { "event": "The camera pans to the right, continues panning forward after making a right turn, and a straight road appears in the scene. There is a white building on the left side of the road, with red decorative strips on it. There is a white car ahead, and there are white lane divider lines in the middle of the road. There is a lake-blue truck in the opposite lane, followed by two other cars. There are two people riding bicycles on the sidewalk to the right. Utility poles and wires are lined up along both sides of the road. 
There is a multi-story building in the back right of the scene.", "visual_elements": [ { "content": "The camera pans to the right.", "type": "camera", "weight": 3 }, { "content": "After panning to the right, the camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "A straight road appears in the scene.", "type": "attribute", "weight": 2 }, { "content": "There is a white building on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "The building has red decorative strips on it.", "type": "attribute", "weight": 1 }, { "content": "There is a white car ahead.", "type": "attribute", "weight": 2 }, { "content": "There are white lane divider lines in the middle of the road.", "type": "attribute", "weight": 2 }, { "content": "There is a lake-blue truck in the opposite lane.", "type": "attribute", "weight": 2 }, { "content": "The truck is followed by two other cars.", "type": "attribute", "weight": 2 }, { "content": "There are two people riding bicycles on the right sidewalk.", "type": "attribute", "weight": 2 }, { "content": "Utility poles and wires are lined up along both sides of the road.", "type": "scene", "weight": 2 }, { "content": "There is a multi-story building in the back right of the scene.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 28 }, { "index": "TUNA_0968", "video_path": "CoVLA/2022-09-05--19-59-44--6_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a city road at night. In front of the camera is a white van, with its license plate blurred. Next to the white van on the adjacent lane is a gray car. The traffic light at the upcoming intersection is green. There are also vehicles on the opposing lane.\nThe camera starts to move forward, following the white van as it drives ahead. Vehicles on the right lane also move forward. Vehicles on the opposing lane are slowly driving forward with their headlights on. There is a row of white trucks on the opposing lane. Above the road ahead are two road signs. The left road sign is overexposed by the light, while the right blue road sign indicates the destination and remaining distance.\nThe camera continues to move forward. On the left side of the road is a sidewalk, followed by a greenbelt. Inside the greenbelt, there is a row of billboards, each with a single word on it. When combined, all the billboards spell out \"Residential Exhibition Venue\". To the left of the billboards is a brightly lit house.\nThe camera continues to move forward. On the left side of the road is a brightly lit gas station with the sign \"ENEOS\". There are many billboards inside the gas station. After passing the gas station, an electronic screen appears ahead, displaying road information. In the top right corner of the electronic screen, a red warning light is constantly blinking. On the left side of the road, there are guardrails, and yellow and black warning tapes are posted on the poles behind the guardrails.", "events": [ { "event": "At the beginning of the video, the camera focuses on a city road at night. In front of the camera is a white van, with its license plate blurred. Next to the white van on the adjacent lane is a gray car. The traffic light at the upcoming intersection is green. 
There are also vehicles on the opposing lane.", "visual_elements": [ { "content": "The camera focuses on a city road at night.", "type": "camera", "weight": 3 }, { "content": "In front of the camera is a white van.", "type": "attribute", "weight": 2 }, { "content": "The license plate is blurred.", "type": "attribute", "weight": 2 }, { "content": "There is a gray car on the lane next to the white van.", "type": "attribute", "weight": 2 }, { "content": "The traffic light at the upcoming intersection is green.", "type": "attribute", "weight": 2 }, { "content": "There are also vehicles on the opposing lane.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera starts to move forward, following the white van as it drives ahead. Vehicles on the right lane also move forward. Vehicles on the opposing lane are slowly driving forward with their headlights on. There is a row of white trucks on the opposing lane. Above the road ahead are two road signs. The left road sign is overexposed by the light, while the right blue road sign indicates the destination and remaining distance.", "visual_elements": [ { "content": "The camera starts to move forward.", "type": "camera", "weight": 3 }, { "content": "The vehicles on the lane begin to drive ahead.", "type": "action", "weight": 3 }, { "content": "There is a row of white trucks on the opposing lane.", "type": "attribute", "weight": 2 }, { "content": "Two road signs hang above the road ahead.", "type": "attribute", "weight": 2 }, { "content": "The left road sign is overexposed by the light.", "type": "attribute", "weight": 2 }, { "content": "The right blue road sign indicates the destination and remaining distance.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward. On the left side of the road is a sidewalk, followed by a greenbelt. Inside the greenbelt, there is a row of billboards, each with a single word on it. When combined, all the billboards spell out \"Residential Exhibition Venue\". To the left of the billboards is a brightly lit house.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "Behind the sidewalk on the left side of the road is a greenbelt.", "type": "attribute", "weight": 2 }, { "content": "There is a row of billboards inserted in the greenbelt.", "type": "attribute", "weight": 2 }, { "content": "Each billboard has only one word on it.", "type": "attribute", "weight": 2 }, { "content": "All the billboards combined spell out \"Residential Exhibition Venue\".", "type": "attribute", "weight": 2 }, { "content": "To the left of the billboards is a brightly lit house.", "type": "scene", "weight": 1 } ] }, { "event": "The camera continues to move forward. On the left side of the road is a brightly lit gas station with the sign \"ENEOS\". There are many billboards inside the gas station. After passing the gas station, an electronic screen appears ahead, displaying road information. In the top right corner of the electronic screen, a red warning light is constantly blinking. 
On the left side of the road, there are guardrails, and yellow and black warning tapes are posted on the poles behind the guardrails.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "On the left side of the road is a brightly lit gas station.", "type": "attribute", "weight": 2 }, { "content": "The sign of the gas station is \"ENEOS\".", "type": "attribute", "weight": 2 }, { "content": "There are many billboards inside the gas station.", "type": "attribute", "weight": 1 }, { "content": "After passing the gas station, an electronic screen appears ahead, displaying road information.", "type": "attribute", "weight": 2 }, { "content": "In the top right corner of the electronic screen, a red warning light is constantly blinking.", "type": "attribute", "weight": 2 }, { "content": "Guardrails appear on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "Yellow and black warning tapes are posted on the pillars behind the guardrails.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 26 }, { "index": "TUNA_0969", "video_path": "CoVLA/2022-09-08--15-26-15--52_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a highway with several cars driving ahead. The closest one to the camera is a white car in the right lane. Above the highway, there is a green traffic sign indicating the exits and directions ahead, including \"Ginza\", \"Omiya\" and \"Tohoku Expressway\" among others. There is a yellow traffic sign on the guardrail on the left side of the highway. There is also a greenbelt area on the outer left side of the highway.\nAs the camera continues to move forward, a large black sign appears above the road ahead, with orange-colored text on it. Below the sign, on the left guardrail, there is a vertically placed display screen that alternately flashes \"Caution Ahead\" and \"渋滞中は\".\nThe camera continues to move forward, and the road ahead enters an uphill section. The white guardrail on the right side of the road transforms into red and white traffic barriers arranged at regular intervals. Vehicles driving on the road to the right of the barriers can be seen in the video. Subsequently, the barriers disappear, and the three lanes revert to two lanes. A truck with a white cab and pink trailer gradually enters the scene in the right lane. Above the road ahead, there is a large green traffic sign, and below the sign, on the left pillar, there is the same display screen as in the previous scene. After passing the sign, the camera begins to accelerate forward and then stops.", "events": [ { "event": "At the beginning of the video, the camera focuses on a highway with several cars driving ahead. The closest one to the camera is a white car in the right lane. Above the highway, there is a green traffic sign indicating the exits and directions ahead, including \"Ginza\", \"Omiya\" and \"Tohoku Expressway\" among others. There is a yellow traffic sign on the guardrail on the left side of the highway. 
There is also a greenbelt area on the outer left side of the highway.", "visual_elements": [ { "content": "The camera focuses on a highway.", "type": "camera", "weight": 3 }, { "content": "There are several cars driving ahead.", "type": "attribute", "weight": 3 }, { "content": "The closest one to the camera is a white car in the right lane.", "type": "attribute", "weight": 2 }, { "content": "There is a green traffic sign above the highway.", "type": "attribute", "weight": 2 }, { "content": "The sign indicates the exits and directions ahead, including \"Ginza\", \"Omiya\" and \"Tohoku Expressway\" among others.", "type": "attribute", "weight": 2 }, { "content": "On the guardrail to the left of the highway, there's a yellow sign.", "type": "attribute", "weight": 2 }, { "content": "There is a greenbelt area on the outer left side of the highway.", "type": "scene", "weight": 1 } ] }, { "event": "As the camera continues to move forward, a large black sign appears above the road ahead, with orange-colored text on it. Below the sign, on the left guardrail, there is a vertically placed display screen that alternately flashes \"Caution Ahead\" and \"渋滞中は\".", "visual_elements": [ { "content": "The camera starts to move forward.", "type": "camera", "weight": 3 }, { "content": "All the cars on the two-lane highway drive forward.", "type": "attribute", "weight": 3 }, { "content": "After the camera passes the green sign, a protruding area appears on the leftmost side of the road.", "type": "attribute", "weight": 2 }, { "content": "This area serves as a temporary parking zone.", "type": "attribute", "weight": 1 }, { "content": "Streetlights in two rows are neatly arranged on the white guardrails lining both sides of the highway.", "type": "attribute", "weight": 2 }, { "content": "In the distance, some buildings and electricity towers can be seen.", "type": "scene", "weight": 2 }, { "content": "The weather is gloomy, with the sky appearing grayish-white.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move forward, and the road ahead enters an uphill section. The white guardrail on the right side of the road transforms into red and white traffic barriers arranged at regular intervals. Vehicles driving on the road to the right of the barriers can be seen in the video. Subsequently, the barriers disappear, and the three lanes revert to two lanes. A truck with a white cab and pink trailer gradually enters the scene in the right lane. Above the road ahead, there is a large green traffic sign, and below the sign, on the left pillar, there is the same display screen as in the previous scene. 
After passing the sign, the camera begins to accelerate forward and then stops.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The road ahead enters an uphill section.", "type": "attribute", "weight": 3 }, { "content": "The white guardrail on the right side transforms into red and white traffic barriers arranged at regular intervals.", "type": "attribute", "weight": 2 }, { "content": "Quite a lot of vehicles are driving on the road to the right of the barriers.", "type": "attribute", "weight": 2 }, { "content": "The barriers then disappear, and the three lanes revert to two lanes.", "type": "attribute", "weight": 2 }, { "content": "A truck with a white cab and pink trailer gradually enters the scene.", "type": "attribute", "weight": 2 }, { "content": "Above the road ahead, there's a large green traffic sign.", "type": "attribute", "weight": 2 }, { "content": "On the left pillar below the sign, there's the same display screen as in the previous scene.", "type": "attribute", "weight": 2 }, { "content": "After passing the sign, the camera starts to accelerate forward and then stops.", "type": "camera", "weight": 3 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0970", "video_path": "CoVLA/2022-09-13--08-20-11--27_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of another segment of the video, the camera focuses on a highway, with the perspective shot from inside a car looking forward. Ahead is a cable-stayed bridge with its cables and towers clearly visible. The weather is gloomy, with the sky appearing grayish-blue. In front of the camera is a white car, and in the left lane, a truck is driving abreast of the white car. The lanes in both directions are separated by a white guardrail, and there are many vehicles on the lane to the right of the guardrail.\nThe camera starts to move forward, as all the vehicles in the lanes also begin to drive ahead. The white car continues to stay in front of the camera, while the truck in the left lane and the red car in front of it gradually disappear from the scene. The cables and towers of the cable-stayed bridge disappear from the scene, and there are blue and green traffic signs above the road ahead.\nThe camera continues to move forward, with the white car still driving ahead in the scene. The blue truck and red truck in the left lane gradually disappear from the scene one by one. There are blue and green traffic signs ahead on the road, and behind the guardrail on the left side of the road is green vegetation, followed by white buildings. Streetlights are neatly arranged on both sides of the road. There is a high-voltage power pylon to the right rear of the road.", "events": [ { "event": "At the beginning of another segment of the video, the camera focuses on a highway, with the perspective shot from inside a car looking forward. Ahead is a cable-stayed bridge with its cables and towers clearly visible. The weather is gloomy, with the sky appearing grayish-blue. In front of the camera is a white car, and in the left lane, a truck is driving abreast of the white car. 
The lanes in both directions are separated by a white guardrail, and there are many vehicles on the lane to the right of the guardrail.", "visual_elements": [ { "content": "The camera focuses on a highway.", "type": "camera", "weight": 3 }, { "content": "The perspective is shot forward from inside a car.", "type": "camera", "weight": 3 }, { "content": "Ahead is a cable-stayed bridge.", "type": "attribute", "weight": 2 }, { "content": "The weather is gloomy, with the sky appearing gray-blue.", "type": "scene", "weight": 2 }, { "content": "In front of the camera is a white car.", "type": "attribute", "weight": 2 }, { "content": "A truck in the left lane drives alongside the white car.", "type": "attribute", "weight": 2 }, { "content": "The lanes in both directions are separated by a white guardrail.", "type": "attribute", "weight": 2 }, { "content": "There are many vehicles on the lane to the right of the guardrail.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera starts to move forward, as all the vehicles in the lanes also begin to drive ahead. The white car continues to stay in front of the camera, while the truck in the left lane and the red car in front of it gradually disappear from the scene. The cables and towers of the cable-stayed bridge disappear from the scene, and there are blue and green traffic signs above the road ahead.", "visual_elements": [ { "content": "The camera starts to move forward.", "type": "camera", "weight": 3 }, { "content": "All the vehicles in the lanes also start to move forward.", "type": "action", "weight": 3 }, { "content": "The white car continues to stay in front of the camera.", "type": "attribute", "weight": 2 }, { "content": "The truck in the left lane and the red car in front of it gradually disappear from the scene.", "type": "attribute", "weight": 2 }, { "content": "The steel cables and towers of the cable-stayed bridge disappear from the scene.", "type": "attribute", "weight": 2 }, { "content": "There are blue and green traffic signs above the road ahead.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward, with the white car still driving ahead in the scene. The blue truck and red truck in the left lane gradually disappear from the scene one by one. There are blue and green traffic signs ahead on the road, and behind the guardrail on the left side of the road is green vegetation, followed by white buildings. Streetlights are neatly arranged on both sides of the road. 
There is a high-voltage power pylon to the right rear of the road.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The white car still drives in front of the camera.", "type": "action", "weight": 2 }, { "content": "The blue truck and red truck in the left lane gradually disappear from the scene one by one.", "type": "attribute", "weight": 2 }, { "content": "Blue and green traffic signs appear again ahead of the road.", "type": "attribute", "weight": 2 }, { "content": "Behind the guardrail on the left side of the road is green vegetation.", "type": "scene", "weight": 2 }, { "content": "Behind the vegetation are white buildings.", "type": "scene", "weight": 1 }, { "content": "Streetlights are neatly arranged on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "There is a high-voltage power tower in the rear right of the road.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0971", "video_path": "CoVLA/2022-09-21--16-58-26--24_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "The video begins with the camera focused on a highway. Tall soundproof barriers line both sides of the road, with the tops curving inward. In front of the camera is a silver van with its tail lights on, and a few other vehicles are driving ahead. The sky is overcast, with thick clouds and dim lighting. The two-way lanes of the highway are separated by a solid guardrail.\nThe camera starts moving forward as vehicles drive along the highway. On the right lane, a white van gradually approaches and overtakes the silver van in front of the camera. The soundproof barrier on the left side of the highway disappears, revealing a toll booth with a sign displaying \"1620円\" in front. There are electronic displays next to the toll booth showing the toll fee and other information. The soundproof barrier on the left expands to the left side of the toll booth. The camera continues panning forward, passing through the toll booth where the roads on both sides are separated by red and white barricades. Vehicles are also driving on the road to the left of the barricades.\nThe camera pans to the right front, shifting the silver van from the center of the camera to the left front. The red and white barricades disappear from the scene. The camera then moves back to the left front, bringing the silver van back to the center of the scene.", "events": [ { "event": "The video begins with the camera focused on a highway. Tall soundproof barriers line both sides of the road, with the tops curving inward. In front of the camera is a silver van with its tail lights on, and a few other vehicles are driving ahead. The sky is overcast, with thick clouds and dim lighting. 
The two-way lanes of the highway are separated by a solid guardrail.", "visual_elements": [ { "content": "The camera focuses on a highway.", "type": "camera", "weight": 3 }, { "content": "Tall soundproof barriers line both sides of the road.", "type": "attribute", "weight": 2 }, { "content": "The tops of the soundproof barriers curve inward.", "type": "attribute", "weight": 2 }, { "content": "In front of the camera is a silver van with its tail lights on.", "type": "attribute", "weight": 2 }, { "content": "A few other vehicles are driving ahead.", "type": "action", "weight": 2 }, { "content": "The sky is overcast, with thick clouds and dim lighting.", "type": "scene", "weight": 2 }, { "content": "The two-way lanes of the highway are separated by a solid guardrail.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera starts moving forward as vehicles drive along the highway. On the right lane, a white van gradually approaches and overtakes the silver van in front of the camera. The soundproof barrier on the left side of the highway disappears, revealing a toll booth with a sign displaying \"1620円\" in front. There are electronic displays next to the toll booth showing the toll fee and other information. The soundproof barrier on the left expands to the left side of the toll booth. The camera continues panning forward, passing through the toll booth where the roads on both sides are separated by red and white barricades. Vehicles are also driving on the road to the left of the barricades.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "Vehicles drive along the highway.", "type": "action", "weight": 3 }, { "content": "On the right lane, a white van gradually approaches and overtakes the silver van in front of the camera.", "type": "action", "weight": 3 }, { "content": "The soundproof barrier on the left side of the highway disappears.", "type": "attribute", "weight": 1 }, { "content": "A toll booth appears ahead.", "type": "scene", "weight": 2 }, { "content": "The sign in front of the toll booth shows \"1620円\".", "type": "attribute", "weight": 2 }, { "content": "There are some electronic displays next to the toll booth.", "type": "attribute", "weight": 2 }, { "content": "The soundproof barrier on the left expands to the left side of the toll booth.", "type": "attribute", "weight": 2 }, { "content": "The camera continues panning forward, passing the toll booth.", "type": "camera", "weight": 3 }, { "content": "The roads on both sides of the toll booth are separated by red and white barricades.", "type": "attribute", "weight": 2 }, { "content": "Vehicles are also driving on the road to the left of the barricades.", "type": "action", "weight": 2 } ] }, { "event": "The camera pans to the right front, shifting the silver van from the center of the camera to the left front. The red and white barricades disappear from the scene. 
The camera then moves back to the left front, bringing the silver van back to the center of the scene.", "visual_elements": [ { "content": "The camera pans to the right front.", "type": "camera", "weight": 3 }, { "content": "The silver van changes from the front of the camera to the front left of the camera.", "type": "attribute", "weight": 2 }, { "content": "The red and white barricades disappear from the scene.", "type": "attribute", "weight": 2 }, { "content": "The camera then moves back to the left front.", "type": "camera", "weight": 3 }, { "content": "The silver van returns to the center of the scene.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0972", "video_path": "CoVLA/2022-10-17--16-27-49--31_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "The video begins with the camera focused on an urban road. It is late in the evening, and the street is illuminated by streetlights and vehicle headlights. On both sides of the road are several buildings. On the left side, there is a multi-story building with lights on in front of the building and inside. A few cars drive ahead, and a blue traffic sign is visible above the road.\nThe camera starts moving forward, and the vehicles ahead begin to move. There are white traffic lines and the words \"Caution for sudden overtaking\" on the road. On the left side of the road, vehicles are parked in front of some buildings. As the camera pans forward, a white guardrail appears on the left side of the road. Behind the guardrail, several gray vans are neatly lined up, with red and green banners in front of their hoods. On the right side of the road, there is a green billboard.\nThe camera continues panning forward. When the traffic light ahead turns yellow, a white car speeds through the intersection. The traffic light then turns red, and the camera stops moving. Under the red light, a green light appears, and vehicles from the opposite direction begin to turn right. One car makes a U-turn at the intersection. The green light disappears, and the red light turns yellow.", "events": [ { "event": "The video begins with the camera focused on an urban road. It is late in the evening, and the street is illuminated by streetlights and vehicle headlights. On both sides of the road are several buildings. On the left side, there is a multi-story building with lights on in front of the building and inside. A few cars drive ahead, and a blue traffic sign is visible above the road.", "visual_elements": [ { "content": "The camera focuses on an urban road.", "type": "camera", "weight": 3 }, { "content": "It is late in the evening, and the street is illuminated by streetlights and vehicle headlights.", "type": "scene", "weight": 3 }, { "content": "On both sides of the road are several buildings.", "type": "scene", "weight": 2 }, { "content": "On the left side, there is a multi-story building.", "type": "scene", "weight": 2 }, { "content": "The lights are on in front of the building and inside.", "type": "scene", "weight": 2 }, { "content": "A few cars drive ahead.", "type": "attribute", "weight": 2 }, { "content": "A blue traffic sign is visible above the road ahead.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera starts moving forward, and the vehicles ahead begin to move. 
There are white traffic lines and the words \"Caution for sudden overtaking\" on the road. On the left side of the road, vehicles are parked in front of some buildings. As the camera pans forward, a white guardrail appears on the left side of the road. Behind the guardrail, several gray vans are neatly lined up, with red and green banners in front of their hoods. On the right side of the road, there is a green billboard.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "The vehicles ahead start moving forward.", "type": "action", "weight": 3 }, { "content": "There are white traffic lines on the road, with the words \"Caution for sudden overtaking.\"", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road, vehicles are parked in front of some buildings.", "type": "attribute", "weight": 2 }, { "content": "As the camera pans forward, a white guardrail appears on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "Behind the guardrail, several gray vans are neatly lined up.", "type": "scene", "weight": 2 }, { "content": "Red and green banners are standing in front of their hoods.", "type": "scene", "weight": 2 }, { "content": "On the right side of the road, there is a green billboard.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues panning forward. When the traffic light ahead turns yellow, a white car speeds through the intersection. The traffic light then turns red, and the camera stops moving. Under the red light, a green light appears, and vehicles from the opposite direction begin to turn right. One car makes a U-turn at the intersection. The green light disappears, and the red light turns yellow.", "visual_elements": [ { "content": "The camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "When the traffic light ahead turns yellow, the white car accelerates through the intersection.", "type": "action", "weight": 3 }, { "content": "Then the traffic light turns red.", "type": "attribute", "weight": 2 }, { "content": "The camera stops moving.", "type": "camera", "weight": 3 }, { "content": "Under the red light, a green light appears.", "type": "attribute", "weight": 2 }, { "content": "Vehicles from the opposite direction at the intersection begin to turn right.", "type": "action", "weight": 3 }, { "content": "One of the cars makes a U-turn at the intersection.", "type": "action", "weight": 3 }, { "content": "The green light disappears, and the red light turns yellow.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0973", "video_path": "CoVLA/2022-10-20--10-31-55--172_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "The video begins with the camera focusing on a two-way road with a solid yellow line dividing the lanes, under clear weather. On the left side of the road, there is a row of dense trees with green, red, and yellow leaves. On the right side of the road, there is a row of white buildings. The scene includes several cars and a red and blue double-decker bus.\nThe camera starts moving forward, with the double-decker bus ahead driving forward, and vehicles on the opposite lane also begin to move normally. 
On the left sidewalk, a few people are walking. There are some billboards on the buildings on the right side of the road.\nThe camera continues to move forward, and the bus drives to a junction. On the left side, a section of guardrail appears, behind which are some trees. The traffic light at the junction is green, and at the right turn intersection, several cars are waiting to pass. The straight road splits into two routes after passing the traffic light. There is a gateway at the entrance of the road on the right, and the road on the right is higher than the one on the left. To the left of the gateway, there is a blue road sign.\nThe camera pans left and moves forward. On the left side of the road, there is a lake with sunlight reflecting off the surface, creating sparkling ripples. On the right side of the road, there are still some trees, and in front of the trees, a row of cars is parked. There is a row of high-voltage power lines on the right side of the road. Pedestrians can occasionally be seen walking on both sides of the road. In the distance, the mountains stretch out. At the end of the video, there is a pavilion to the left front of the bus.", "events": [ { "event": "The video begins with the camera focusing on a two-way road with a solid yellow line dividing the lanes, under clear weather. On the left side of the road, there is a row of dense trees with green, red, and yellow leaves. On the right side of the road, there is a row of white buildings. The scene includes several cars and a red and blue double-decker bus.", "visual_elements": [ { "content": "The camera focuses on a two-way road.", "type": "camera", "weight": 3 }, { "content": "The dividing line between the two lanes is a solid yellow line.", "type": "attribute", "weight": 2 }, { "content": "The weather is clear.", "type": "scene", "weight": 2 }, { "content": "On the left side of the road, there is a row of dense trees.", "type": "scene", "weight": 2 }, { "content": "The leaves are green, red, and yellow.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the road, there is a row of white buildings.", "type": "scene", "weight": 2 }, { "content": "There are several cars and a red and blue double-decker bus in the scene.", "type": "scene", "weight": 3 } ] }, { "event": "The camera starts moving forward, with the double-decker bus ahead driving forward, and vehicles on the opposite lane also begin to move normally. On the left sidewalk, a few people are walking. There are some billboards on the buildings on the right side of the road.", "visual_elements": [ { "content": "The camera starts to move forward.", "type": "camera", "weight": 3 }, { "content": "The double-decker bus ahead is moving forward.", "type": "action", "weight": 3 }, { "content": "Vehicles in the opposite lane also begin to move normally.", "type": "action", "weight": 3 }, { "content": "On the left sidewalk, several people are walking.", "type": "action", "weight": 2 }, { "content": "There are some billboards hanging on the buildings on the right side of the road.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move forward, and the bus drives to a junction. On the left side, a section of guardrail appears, behind which are some trees. The traffic light at the junction is green, and at the right turn intersection, several cars are waiting to pass. The straight road splits into two routes after passing the traffic light. 
There is a gateway at the entrance of the road on the right, and the road on the right is higher than the one on the left. To the left of the gateway, there is a blue road sign.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The bus drives to a junction.", "type": "attribute", "weight": 3 }, { "content": "A section of guardrail appears on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "Behind the guardrail, there are some trees.", "type": "scene", "weight": 2 }, { "content": "The traffic light at the junction is green.", "type": "attribute", "weight": 2 }, { "content": "At the right turn intersection, several cars are waiting to pass.", "type": "attribute", "weight": 1 }, { "content": "The straight road splits into two routes after passing through the traffic lights.", "type": "attribute", "weight": 2 }, { "content": "There is an archway at the entrance to the road on the right side.", "type": "attribute", "weight": 2 }, { "content": "The terrain on the right side of the road is higher than that on the left side.", "type": "attribute", "weight": 2 }, { "content": "There is a blue road sign to the left of the archway.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans left and moves forward. On the left side of the road, there is a lake with sunlight reflecting off the surface, creating sparkling ripples. On the right side of the road, there are still some trees, and in front of the trees, a row of cars is parked. There is a row of high-voltage power lines on the right side of the road. Pedestrians can occasionally be seen walking on both sides of the road. In the distance, the mountains stretch out. At the end of the video, there is a pavilion to the left front of the bus.", "visual_elements": [ { "content": "The camera pans left and moves forward.", "type": "camera", "weight": 3 }, { "content": "On the left side of this road is a lake.", "type": "attribute", "weight": 2 }, { "content": "The sunlight reflects on the surface of the lake, creating shimmering waves.", "type": "attribute", "weight": 1 }, { "content": "The right side of the road still has some trees.", "type": "attribute", "weight": 2 }, { "content": "In the open area in front of the trees, a row of cars is parked.", "type": "attribute", "weight": 2 }, { "content": "There is a row of high-voltage power pylons on the right side of the road.", "type": "attribute", "weight": 2 }, { "content": "Occasionally, pedestrians can be seen walking on both sides of the road.", "type": "attribute", "weight": 2 }, { "content": "The mountains in the distance are rolling.", "type": "scene", "weight": 2 }, { "content": "At the end of the video, there is a pavilion to the left front of the bus.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 31 }, { "index": "TUNA_0974", "video_path": "CoVLA/2022-10-20--10-31-55--176_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "The video begins with the camera focused on a bus driving on a rural road. The bus is colored red, white, and blue, with a green pattern on the white part of the rear. On both sides of the road, there are dense trees with leaves in green, yellow, and red. On the left side of the road, there are also white road posts connected by three iron chains. The camera pans slowly forward with the bus. 
A round traffic mirror appears on the left side of the road. A black and gray car passes by in the opposite lane.\nThe camera pans right. At the turn, there is a wooden signpost, and the text on the sign is blurred. On both sides of the road after the turn, yellow fallen leaves are scattered on the ground. On the left side of the road, there is a fork in the road, and next to the flower bed at the fork, there is also a wooden signpost. At the end of the right turn, there is a left turn.\nThe camera pans left, turning from the fork in the road. After the left turn, a parking lot appears in the scene. The parking lot is marked with white dividing lines. Several cars are parked in an orderly manner. In the foreground, two pedestrians are standing, one of whom is bending down to check something. The background is a forest.\nThe camera pans right, turning toward the right side of the parking area. At each parking space, two stones are placed at one end to assist with braking. Above the right side of the parking area, there is a circle of black guardrails, behind which the trees have green and red leaves.", "events": [ { "event": "The video begins with the camera focused on a bus driving on a rural road. The bus is colored red, white, and blue, with a green pattern on the white part of the rear. On both sides of the road, there are dense trees with leaves in green, yellow, and red. On the left side of the road, there are also white road posts connected by three iron chains. The camera pans slowly forward with the bus. A round traffic mirror appears on the left side of the road. A black and gray car passes by in the opposite lane.", "visual_elements": [ { "content": "The camera focuses on a bus driving on a rural highway.", "type": "camera", "weight": 3 }, { "content": "The bus is colored red, white, and blue.", "type": "attribute", "weight": 2 }, { "content": "There is a green pattern on the white part of the rear of the bus.", "type": "attribute", "weight": 2 }, { "content": "There are dense trees on both sides of the highway.", "type": "attribute", "weight": 2 }, { "content": "The leaves are green, yellow, and red.", "type": "attribute", "weight": 2 }, { "content": "There are also white road posts on the left side of the road.", "type": "scene", "weight": 2 }, { "content": "The road posts are connected by three iron chains.", "type": "attribute", "weight": 2 }, { "content": "The camera pans slowly forward as the bus continues to move.", "type": "camera", "weight": 3 }, { "content": "A circular traffic mirror appears on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "A black and gray car passes the camera in the opposite lane.", "type": "action", "weight": 3 } ] }, { "event": "The camera pans right. At the turn, there is a wooden signpost, and the text on the sign is blurred. On both sides of the road after the turn, yellow fallen leaves are scattered on the ground. On the left side of the road, there is a fork in the road, and next to the flower bed at the fork, there is also a wooden signpost. 
At the end of the right turn, there is a left turn.", "visual_elements": [ { "content": "The camera pans right.", "type": "camera", "weight": 3 }, { "content": "A wooden road sign is erected at the corner.", "type": "attribute", "weight": 2 }, { "content": "The text on the road sign is blurred.", "type": "attribute", "weight": 2 }, { "content": "On both sides of the road after the right turn, yellow fallen leaves are piled on the ground.", "type": "attribute", "weight": 2 }, { "content": "There is a fork in the road on the left side.", "type": "attribute", "weight": 2 }, { "content": "A wooden sign is also erected in the flowerbed next to the forked road.", "type": "attribute", "weight": 2 }, { "content": "At the end of the road after the right turn, there is a left turn.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans left, turning from the fork in the road. After the left turn, a parking lot appears in the scene. The parking lot is marked with white dividing lines. Several cars are parked in an orderly manner. In the foreground, two pedestrians are standing, one of whom is bending down to check something. The background is a forest.", "visual_elements": [ { "content": "The camera pans left and turns at the forked road.", "type": "camera", "weight": 3 }, { "content": "After the left turn, a parking lot appears in the scene.", "type": "attribute", "weight": 3 }, { "content": "The parking lot has white dividing lines.", "type": "attribute", "weight": 2 }, { "content": "Several cars are parked in an orderly manner in the parking lot.", "type": "attribute", "weight": 2 }, { "content": "There are also two pedestrians standing ahead.", "type": "attribute", "weight": 2 }, { "content": "One of the pedestrians is bending over, inspecting something.", "type": "action", "weight": 2 }, { "content": "The background is a forest.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans right, turning toward the right side of the parking area. At each parking space, two stones are placed at one end to assist with braking. Above the right side of the parking area, there is a circle of black guardrails, behind which the trees have green and red leaves.", "visual_elements": [ { "content": "The camera pans right and turns toward the right parking area of the parking lot.", "type": "camera", "weight": 3 }, { "content": "At the end of each parking space, two stones are placed to assist with braking.", "type": "attribute", "weight": 1 }, { "content": "Above the right parking area of the lot, there is a black guardrail.", "type": "scene", "weight": 2 }, { "content": "Behind the guardrail, the trees have green and red leaves.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 28 }, { "index": "TUNA_0975", "video_path": "CoVLA/2022-10-21--14-38-35--92_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "The video begins with the camera focusing on an urban road, with a driver’s perspective. Ahead is a pedestrian bridge with the text “取手駅入口” written on it. In the middle of the bridge hangs a blue road sign, and the traffic light on the right side of the sign is green. An advertisement board is set up on the left side of the bridge. There are several different types of cars on the road ahead. The sky is clear, and the lighting is bright.\nThe camera begins to pan forward, and the vehicles in the lanes start moving as well. 
On the left side of the bridge, there is a staircase leading to the ground, with some greenery and utility poles beside it. After passing the bridge, there are buildings on both sides of the road. On the left is a white building, and on the right is a high-rise building with brown decorations.\nThe camera continues to move forward, and the buildings on both sides of the road gradually change. On the left, there are initially some residential buildings and a parking lot, followed by some shops and billboards. On the right, there is a white fence. The road has clear lane markings, and vehicles drive within their lanes. On the left side of the road, there is a gas station with an \"ENEOS\" sign. Beside the gas station is an electronic display showing the current fuel prices. On the right is a large parking lot, filled with many vehicles. Utility poles and wires are clearly visible on both sides of the road.\nThe camera pans right and forward, and the vehicle ahead changes from a white car to a van in the adjacent right lane. There are red billboards on both sides of the road, and on the left side, there is a blue traffic sign. White directional markings are on the road.\nThe camera pans right and forward again, and the two-lane road ahead turns into a three-lane road. The vehicle ahead changes to a black van in the far-right lane. A junction appears ahead, with a pedestrian bridge over it. At the junction, there are traffic lights and more traffic signs. On the left side, there is a billboard with the text \"AEON MALL,\" indicating the direction to the shopping mall. On the right side, there is a restaurant with the sign “GUSTO.”", "events": [ { "event": "The video begins with the camera focusing on an urban road, with a driver’s perspective. Ahead is a pedestrian bridge with the text “取手駅入口” written on it. In the middle of the bridge hangs a blue road sign, and the traffic light on the right side of the sign is green. An advertisement board is set up on the left side of the bridge. There are several different types of cars on the road ahead. The sky is clear, and the lighting is bright.", "visual_elements": [ { "content": "The camera focuses on an urban road.", "type": "camera", "weight": 3 }, { "content": "The perspective is from the driver’s point of view.", "type": "camera", "weight": 3 }, { "content": "Ahead in the scene, there is a pedestrian bridge.", "type": "attribute", "weight": 3 }, { "content": "The pedestrian bridge has the text \"取手駅入口\" written on it.", "type": "attribute", "weight": 2 }, { "content": "A blue road sign is hanging in the middle of the bridge.", "type": "attribute", "weight": 2 }, { "content": "The traffic light to the right of the road sign is green.", "type": "attribute", "weight": 1 }, { "content": "On the left side of the bridge, there is an advertisement board.", "type": "attribute", "weight": 2 }, { "content": "Several cars of different types are on the road ahead.", "type": "attribute", "weight": 3 }, { "content": "The sky is clear, and the lighting is bright.", "type": "scene", "weight": 2 } ] }, { "event": "The camera begins to pan forward, and the vehicles in the lanes start moving as well. On the left side of the bridge, there is a staircase leading to the ground, with some greenery and utility poles beside it. After passing the bridge, there are buildings on both sides of the road. 
On the left is a white building, and on the right is a high-rise building with brown decorations.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "The vehicles on the lanes also begin to move.", "type": "action", "weight": 3 }, { "content": "On the left side of the bridge, there is a staircase leading to the ground.", "type": "attribute", "weight": 2 }, { "content": "There are some greenery and utility poles next to the staircase.", "type": "scene", "weight": 2 }, { "content": "After passing the pedestrian bridge, buildings appear on both sides of the road.", "type": "attribute", "weight": 2 }, { "content": "On the left side, there is a white building.", "type": "attribute", "weight": 2 }, { "content": "On the right side, there is a high-rise building with brown decorations.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward, and the buildings on both sides of the road gradually change. On the left, there are initially some residential buildings and a parking lot, followed by some shops and billboards. On the right, there is a white fence. The road has clear lane markings, and vehicles drive within their lanes. On the left side of the road, there is a gas station with an \"ENEOS\" sign. Beside the gas station is an electronic display showing the current fuel prices. On the right is a large parking lot, filled with many vehicles. Utility poles and wires are clearly visible on both sides of the road.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "On the left side, there are initially some residential buildings and a parking lot, followed by some shops and advertisement boards.", "type": "scene", "weight": 2 }, { "content": "The right side is separated by a white fence.", "type": "scene", "weight": 2 }, { "content": "The road has clear lane markings.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road, there is a gas station with the \"ENEOS\" logo.", "type": "attribute", "weight": 2 }, { "content": "Next to the gas station, there is an electronic display showing the current fuel price.", "type": "attribute", "weight": 2 }, { "content": "On the right side, there is a large parking lot with many vehicles parked in it.", "type": "attribute", "weight": 2 }, { "content": "Utility poles and wires on both sides of the road are clearly visible.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans right and forward, and the vehicle ahead changes from a white car to a van in the adjacent right lane. There are red billboards on both sides of the road, and on the left side, there is a blue traffic sign. White directional markings are on the road.", "visual_elements": [ { "content": "The camera pans to the right front and then keeps moving forward.", "type": "camera", "weight": 3 }, { "content": "The vehicle ahead changes from a white car to a van in the adjacent right lane.", "type": "attribute", "weight": 3 }, { "content": "There are red advertisement boards on both sides of the road.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road, there is a blue traffic sign.", "type": "attribute", "weight": 2 }, { "content": "White directional markings are visible on the road.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera pans right and forward again, and the two-lane road ahead turns into a three-lane road. The vehicle ahead changes to a black van in the far-right lane. A junction appears ahead, with a pedestrian bridge over it. At the junction, there are traffic lights and more traffic signs. On the left side, there is a billboard with the text \"AEON MALL,\" indicating the direction to the shopping mall. 
On the right side, there is a restaurant with the sign “GUSTO.”", "visual_elements": [ { "content": "The camera pans to the right and forward again, and then keeps moving forward.", "type": "camera", "weight": 3 }, { "content": "The two-lane road ahead turns into a three-lane road.", "type": "attribute", "weight": 2 }, { "content": "The vehicle in front of the camera changes to a black van in the rightmost lane.", "type": "attribute", "weight": 3 }, { "content": "An intersection appears ahead.", "type": "attribute", "weight": 2 }, { "content": "A pedestrian bridge spans across the intersection.", "type": "attribute", "weight": 2 }, { "content": "There are traffic lights and more traffic signs at the intersection.", "type": "attribute", "weight": 2 }, { "content": "On the left side, there is an advertisement board with the \"AEON MALL\" sign, indicating the direction to the shopping center.", "type": "attribute", "weight": 2 }, { "content": "On the right side, there is a restaurant with the \"GUSTO\" logo.", "type": "attribute", "weight": 2 } ] } ], "n_events": 5, "n_elements": 37 }, { "index": "TUNA_0976", "video_path": "CoVLA/2022-10-24--14-51-50--50_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a two-way road with a gloomy sky. There are utility poles and wires on both sides of the road, along with some shops and advertisement boards. Ahead, there is a cement truck. To the right, there is a black sedan. On the left sidewalk, there is a primary school student wearing a yellow hat and a blue and white school uniform, facing the camera, with a backpack.\nThe camera starts moving forward. The scenery on both sides of the road changes, with a newly appeared greenbelt separating the two-way lanes. On the left side of the road, there is a gas station with the \"ENEOS\" logo, and the display screen in front of the station flashes the fuel price. After passing the gas station, a small square appears on the left side of the road. There are many signs in front of the square indicating a parking lot, restaurant, and convenience store inside.\nThe camera pans to the left. At the turning point, there is a 7-Eleven convenience store advertisement board with an arrow pointing left. During the pan to the left, the 7-Eleven convenience store appears in the scene, with many vehicles parked in front of it. A black car is still driving towards the parking area. After completing the pan to the left, the camera continues panning forward. Ahead, there are two advertisement boards at the end of the road. The left advertisement board has an arrow pointing to the right. On both sides of the road, there are light-colored houses, and in the background, there are dark-colored houses with intersecting utility poles and wires.\nThe camera pans to the right. During the pan, white parking lines appear on the ground, with a dark car already parked in a parking spot. 
The camera finally stops in front of a brick wall building, with a \"No Smoking\" sign in front of it. The camera focuses on the building’s window, where lights and some equipment are visible inside.", "events": [ { "event": "At the beginning of the video, the camera focuses on a two-way road with a gloomy sky. There are utility poles and wires on both sides of the road, along with some shops and advertisement boards. Ahead, there is a cement truck. To the right, there is a black sedan. On the left sidewalk, there is a primary school student wearing a yellow hat and a blue and white school uniform, facing the camera, with a backpack.", "visual_elements": [ { "content": "The camera focuses on a two-way road.", "type": "camera", "weight": 3 }, { "content": "The sky is gloomy.", "type": "scene", "weight": 2 }, { "content": "There are utility poles and wires on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "There are some shops and advertisement boards on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "Ahead of the camera, there is a cement truck.", "type": "attribute", "weight": 1 }, { "content": "To the right, there is a black sedan.", "type": "attribute", "weight": 2 }, { "content": "On the left sidewalk, there is a primary school student wearing a yellow hat and a blue and white school uniform.", "type": "attribute", "weight": 2 }, { "content": "The student is facing the camera with a backpack.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera starts moving forward. The scenery on both sides of the road changes, with a newly appeared greenbelt separating the two-way lanes. On the left side of the road, there is a gas station with the \"ENEOS\" logo, and the display screen in front of the station flashes the fuel price. After passing the gas station, a small square appears on the left side of the road. There are many signs in front of the square indicating a parking lot, restaurant, and convenience store inside.", "visual_elements": [ { "content": "The camera starts moving forward.", "type": "camera", "weight": 3 }, { "content": "A newly appeared greenbelt separates the two-way lanes.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road, there is a gas station with the \"ENEOS\" logo.", "type": "attribute", "weight": 2 }, { "content": "The display screen in front of the gas station flashes the fuel price.", "type": "attribute", "weight": 2 }, { "content": "After passing the gas station, a small square appears on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "There are many signs in front of the square, indicating a parking lot, restaurant, and convenience store inside.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans to the left. At the turning point, there is a 7-Eleven convenience store advertisement board with an arrow pointing left. During the pan to the left, the 7-Eleven convenience store appears in the scene, with many vehicles parked in front of it. A black car is still driving towards the parking area. After completing the pan to the left, the camera continues panning forward. Ahead, there are two advertisement boards at the end of the road. The left advertisement board has an arrow pointing to the right. 
On both sides of the road, there are light-colored houses, and in the background, there are dark-colored houses with intersecting utility poles and wires.", "visual_elements": [ { "content": "The camera pans left.", "type": "camera", "weight": 3 }, { "content": "At the turning point, there is a 7-Eleven convenience store advertisement board.", "type": "attribute", "weight": 2 }, { "content": "The advertisement board has an arrow pointing left.", "type": "attribute", "weight": 2 }, { "content": "During the pan to the left, a 7-Eleven convenience store appears in the scene.", "type": "attribute", "weight": 2 }, { "content": "Many vehicles are parked in front of the convenience store.", "type": "attribute", "weight": 2 }, { "content": "A black car is still driving towards the parking area.", "type": "action", "weight": 2 }, { "content": "After completing the pan to the left, the camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "Ahead, there are two advertisement boards at the end of the road.", "type": "attribute", "weight": 2 }, { "content": "The left advertisement board has an arrow pointing to the right.", "type": "attribute", "weight": 2 }, { "content": "On both sides of the road, there are light-colored houses.", "type": "attribute", "weight": 2 }, { "content": "In the background, there are dark-colored houses and intersecting utility poles and wires.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans to the right. During the pan, white parking lines appear on the ground, with a dark car already parked in a parking spot. The camera finally stops in front of a brick wall building, with a \"No Smoking\" sign in front of it. The camera focuses on the building’s window, where lights and some equipment are visible inside.", "visual_elements": [ { "content": "The camera pans right.", "type": "camera", "weight": 3 }, { "content": "During the pan, white parking lines appear on the ground in the scene.", "type": "attribute", "weight": 2 }, { "content": "A dark-colored car is already parked in a parking spot.", "type": "attribute", "weight": 2 }, { "content": "The camera finally stops in front of a brick wall building.", "type": "camera", "weight": 3 }, { "content": "In front of the building, there is a \"No Smoking\" sign.", "type": "attribute", "weight": 2 }, { "content": "The camera focuses on the building’s window, where lights and some equipment are visible inside.", "type": "scene", "weight": 1 } ] } ], "n_events": 4, "n_elements": 31 }, { "index": "TUNA_0977", "video_path": "CoVLA/2022-10-24--15-52-05--20_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the start of the video, the camera is focused on a country road, with the sky slightly dim and covered with clouds. On both sides of the road are vast fields, with utility poles and wires extending along the road. In the right foreground, there is a teal-colored truck in the opposite lane. Behind the truck, there is a line of vehicles, some of which have their headlights on.\nThe camera begins to pan forward as the truck gradually passes by the camera, followed by the vehicles behind it. There is a speed limit sign of 50 on the road. Intermittent signposts are lined up on both sides of the road.\nThe camera continues to move forward, gradually approaching a black car ahead. The speed limit sign on the road changes from 50 to 40. 
The straight road turns into a curved one, and the fields on both sides of the road transform into houses. The camera lingers as the black car is about to navigate the curve. On the right side of the bend, the building's first floor is red, the second floor is white, and the roof is black. At the end of the video, a yellow junction sign is erected on the left side of the road.", "events": [ { "event": "At the start of the video, the camera is focused on a country road, with the sky slightly dim and covered with clouds. On both sides of the road are vast fields, with utility poles and wires extending along the road. In the right foreground, there is a teal-colored truck in the opposite lane. Behind the truck, there is a line of vehicles, some of which have their headlights on.", "visual_elements": [ { "content": "The camera is focused on a country road.", "type": "camera", "weight": 3 }, { "content": "The sky is dim and covered with clouds.", "type": "scene", "weight": 2 }, { "content": "On both sides of the road are vast fields.", "type": "scene", "weight": 2 }, { "content": "Utility poles and wires extend along the road.", "type": "scene", "weight": 2 }, { "content": "In the right foreground, there is a teal-colored truck in the opposite lane.", "type": "attribute", "weight": 2 }, { "content": "Behind the truck, there are several vehicles, some with their headlights on.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera begins to pan forward as the truck gradually passes by the camera, followed by the vehicles behind it. There is a speed limit sign of 50 on the road. Intermittent signposts are lined up on both sides of the road.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "The truck gradually passes by the camera", "type": "action", "weight": 3 }, { "content": "The vehicle behind the truck gradually passes by the camera.", "type": "action", "weight": 3 }, { "content": "There is a speed limit sign of 50 on the road.", "type": "attribute", "weight": 2 }, { "content": "Intermittent signposts are lined up on both sides of the road.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues to move forward, gradually approaching a black car ahead. The speed limit sign on the road changes from 50 to 40. The straight road turns into a curved one, and the fields on both sides of the road transform into houses. The camera lingers as the black car is about to navigate the curve. On the right side of the bend, the building's first floor is red, the second floor is white, and the roof is black. 
At the end of the video, a yellow junction sign is erected on the left side of the road.", "visual_elements": [ { "content": "The camera continues panning forward, gradually approaching a black car ahead.", "type": "camera", "weight": 3 }, { "content": "The speed limit sign on the road changes from 50 to 40.", "type": "attribute", "weight": 2 }, { "content": "The straight road turns into a curved road.", "type": "attribute", "weight": 2 }, { "content": "The fields on both sides of the road turn into houses.", "type": "attribute", "weight": 2 }, { "content": "The camera lingers as the black car ahead is about to navigate the curve.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the bend, the building’s first floor is red, the second floor is white, and the roof is black.", "type": "scene", "weight": 2 }, { "content": "At the end of the video, a yellow junction sign is erected on the left side of the road.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 18 }, { "index": "TUNA_0978", "video_path": "CoVLA/2022-10-26--15-11-29--55_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a two-lane road, with the glaring sunlight slanting in from the right. On the left side of the road, there are some low shrubs and trees. A few cars with their taillights on are ahead.\nThe camera begins to pan forward, and a black van enters from the right side of the scene. The camera pans to the right front and continues forward, following the black van. The two-lane road becomes a three-lane road, with arrows for left turn, straight, and right turn from left to right. The traffic light at the intersection ahead is green, and there are vehicles continuously coming from the opposite lane.\nAs the camera passes the intersection, two cars with right-turn signals are waiting to pass. The camera follows the black van, first passing a left-turn lane, then continuing straight. On the left side of the road, the scenery remains neat with trees and low shrubs. On the right side of the road, a greenhouse and a light-colored house appear.\nThe camera continues panning forward, and fields appear on both sides of the road. On the left side, a fence surrounds the fields, with posters featuring human portraits on the fence. A white truck moves off the right side of the scene in the right lane, with a silver truck behind it.", "events": [ { "event": "At the beginning of the video, the camera focuses on a two-lane road, with the glaring sunlight slanting in from the right. On the left side of the road, there are some low shrubs and trees. A few cars with their taillights on are ahead.", "visual_elements": [ { "content": "The camera focuses on a two-lane road.", "type": "camera", "weight": 3 }, { "content": "The glaring sunlight slants in from the right.", "type": "scene", "weight": 3 }, { "content": "On the left side of the road, there are some low shrubs and trees.", "type": "scene", "weight": 2 }, { "content": "Ahead, a few cars with their taillights on are visible.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera begins to pan forward, and a black van enters from the right side of the scene. The camera pans to the right front and continues forward, following the black van. 
The two-lane road becomes a three-lane road, with arrows for left turn, straight, and right turn from left to right. The traffic light at the intersection ahead is green, and there are vehicles continuously coming from the opposite lane.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "A black van enters from the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The camera pans to the right front.", "type": "camera", "weight": 3 }, { "content": "The camera follows behind the black van and continues forward.", "type": "camera", "weight": 3 }, { "content": "The two-lane road becomes a three-lane road.", "type": "attribute", "weight": 2 }, { "content": "Arrows for left turn, straight, and right turn are painted on the lanes from left to right.", "type": "attribute", "weight": 2 }, { "content": "The traffic light at the intersection ahead is green.", "type": "attribute", "weight": 2 }, { "content": "Vehicles are continuously coming from the opposite lane.", "type": "action", "weight": 2 } ] }, { "event": "As the camera passes the intersection, two cars with right-turn signals are waiting to pass. The camera follows the black van, first passing a left-turn lane, then continuing straight. On the left side of the road, the scenery remains neat with trees and low shrubs. On the right side of the road, a greenhouse and a light-colored house appear.", "visual_elements": [ { "content": "As the camera passes the intersection, two cars with right-turn signals are waiting to pass.", "type": "attribute", "weight": 2 }, { "content": "The camera follows the black van, first passing a left-turn lane, then continuing straight.", "type": "camera", "weight": 3 }, { "content": "On the left side of the road, the scenery remains neat with trees and low shrubs.", "type": "scene", "weight": 2 }, { "content": "On the right side of the road, a greenhouse and a light-colored house appear.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues panning forward, and fields appear on both sides of the road. On the left side, a fence surrounds the fields, with posters featuring human portraits on the fence. A white truck moves off the right side of the scene in the right lane, with a silver truck behind it.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "Fields appear on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "On the left side, the fields are surrounded by a fence.", "type": "scene", "weight": 2 }, { "content": "Posters featuring human portraits are pasted on the fence.", "type": "scene", "weight": 2 }, { "content": "A white truck leaves the right side of the scene in the right lane.", "type": "action", "weight": 2 }, { "content": "A silver truck follows behind the white truck.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 22 }, { "index": "TUNA_0979", "video_path": "CoVLA/2022-10-27--12-02-59--67_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on an empty road with guardrails on both sides. On the left side, outside the guardrail, there are high-voltage power towers and power lines. In the distance, some mountains can be seen. 
The sky is overcast, with thick clouds.\nThe camera begins to pan forward, and the road starts to curve slightly. The guardrails on both sides of the road turn into fences, and at the end of the fenced section, there is a yellow warning sign on the left side.\nAfter leaving the fenced section, the camera first moves to the front-right and then continues panning forward. The guardrails on both sides of the road reappear, with the right side becoming a double guardrail, and some plants growing between the rails. Vehicles are moving on the road to the right of the double guardrail. Signboards and warning signs appear progressively on the left side of the road. At the end of the video, high-voltage power towers and power lines also appear on the right side of the road.", "events": [ { "event": "At the beginning of the video, the camera focuses on an empty road with guardrails on both sides. On the left side, outside the guardrail, there are high-voltage power towers and power lines. In the distance, some mountains can be seen. The sky is overcast, with thick clouds.", "visual_elements": [ { "content": "The camera focuses on an empty road.", "type": "camera", "weight": 3 }, { "content": "There are guardrails on both sides of the road.", "type": "attribute", "weight": 2 }, { "content": "Outside the left guardrail, there are high-voltage power towers and power lines.", "type": "scene", "weight": 2 }, { "content": "Some mountains can be seen in the distance.", "type": "scene", "weight": 1 }, { "content": "The sky is overcast, with thick clouds.", "type": "scene", "weight": 2 } ] }, { "event": "The camera begins to pan forward, and the road starts to curve slightly. The guardrails on both sides of the road turn into fences, and at the end of the fenced section, there is a yellow warning sign on the left side.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "The road begins to curve slightly.", "type": "attribute", "weight": 2 }, { "content": "The guardrails on both sides of the road turn into fences.", "type": "attribute", "weight": 2 }, { "content": "At the end of the fenced section, there is a yellow warning sign on the left side.", "type": "attribute", "weight": 2 } ] }, { "event": "After leaving the fenced section, the camera first moves to the front-right and then continues panning forward. The guardrails on both sides of the road reappear, with the right side becoming a double guardrail, and some plants growing between the rails. Vehicles are moving on the road to the right of the double guardrail. Signboards and warning signs appear progressively on the left side of the road. 
At the end of the video, high-voltage power towers and power lines also appear on the right side of the road.", "visual_elements": [ { "content": "After leaving the fenced section, the camera first moves to the front-right and then continues panning forward.", "type": "camera", "weight": 3 }, { "content": "The guardrails on both sides of the road reappear in the scene.", "type": "attribute", "weight": 2 }, { "content": "The right guardrail turns into a double guardrail.", "type": "attribute", "weight": 2 }, { "content": "Some plants are growing between the rails.", "type": "attribute", "weight": 2 }, { "content": "Vehicles are moving on the road to the right of the double guardrail.", "type": "action", "weight": 2 }, { "content": "Signboards and warning signs appear progressively on the left side of the road.", "type": "scene", "weight": 2 }, { "content": "At the end of the video, high-voltage power towers and power lines also appear on the right side of the road.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 16 }, { "index": "TUNA_0980", "video_path": "CoVLA/2022-10-31--15-02-03--37_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a wide two-way lane, with clear weather and a bright blue sky. In the center of the scene, there are three cars: two white vans on the left and a small truck on the right. On the left side of the road, there is a gray two-story building and green trees. On the right side, there is a construction area with some industrial facilities and machinery.\nThe camera begins to pan forward, and two of the three cars start driving forward. The van on the far left is parked in a temporary parking area by the roadside. In front of the parked van, a black car is also stopped by the roadside. The road ahead is an uphill section.\nThe camera continues to move forward. The gray building on the left side of the road disappears from the scene, replaced by a green area. The construction area on the right side of the road turns into gray buildings. Vehicles in the opposite lane are driving downhill. On the road ahead, the left lane has arrows indicating both left and right turns, while the right lane has arrows indicating a right turn.\nAfter passing the uphill section, the camera continues to move forward. At the intersection ahead, the traffic light is red. The small truck and the van to its left gradually slow down and come to a stop. The camera slows down and moves forward, gradually approaching the rear of the small truck. The van's rear door displays the words “OKANO Express,” and the contents of the small truck's trunk are clearly visible. A black car passes through the intersection ahead.", "events": [ { "event": "At the beginning of the video, the camera focuses on a wide two-way lane, with clear weather and a bright blue sky. In the center of the scene, there are three cars: two white vans on the left and a small truck on the right. On the left side of the road, there is a gray two-story building and green trees. 
On the right side, there is a construction area with some industrial facilities and machinery.", "visual_elements": [ { "content": "The camera focuses on a wide two-way lane.", "type": "camera", "weight": 3 }, { "content": "The weather is clear, and the sky is bright blue.", "type": "scene", "weight": 2 }, { "content": "In the center of the scene, there are three cars.", "type": "attribute", "weight": 3 }, { "content": "The two cars on the left are white vans.", "type": "attribute", "weight": 2 }, { "content": "The car on the right is a small truck.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road, there is a gray two-story building and green trees.", "type": "scene", "weight": 2 }, { "content": "On the right side, there is a construction area with some industrial facilities and machinery.", "type": "scene", "weight": 2 } ] }, { "event": "The camera begins to pan forward, and two of the three cars start driving forward. The van on the far left is parked in a temporary parking area by the roadside. In front of the parked van, a black car is also stopped by the roadside. The road ahead is an uphill section.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "Two of the three cars start driving forward.", "type": "action", "weight": 3 }, { "content": "The van on the far left is parked in a temporary parking area by the roadside.", "type": "attribute", "weight": 2 }, { "content": "In front of the parked van, a black car is also stopped by the roadside.", "type": "scene", "weight": 1 }, { "content": "The road ahead is an uphill section.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera continues to move forward. The gray building on the left side of the road disappears from the scene, replaced by a green area. The construction area on the right side of the road turns into gray buildings. Vehicles in the opposite lane are driving downhill. On the road ahead, the left lane has arrows indicating both left and right turns, while the right lane has arrows indicating a right turn.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The gray building on the left side of the road disappears from the scene, replaced by a green area.", "type": "attribute", "weight": 2 }, { "content": "The construction area on the right side of the road turns into gray buildings.", "type": "attribute", "weight": 2 }, { "content": "Vehicles in the opposite lane are driving downhill.", "type": "attribute", "weight": 2 }, { "content": "On the road ahead, the left lane has arrows indicating both left and right turns.", "type": "attribute", "weight": 2 }, { "content": "The right lane has arrows indicating a right turn.", "type": "attribute", "weight": 2 } ] }, { "event": "After passing the uphill section, the camera continues to move forward. At the intersection ahead, the traffic light is red. The small truck and the van to its left gradually slow down and come to a stop. The camera slows down and moves forward, gradually approaching the rear of the small truck. The van's rear door displays the words “OKANO Express,” and the contents of the small truck's trunk are clearly visible. 
A black car passes through the intersection ahead.", "visual_elements": [ { "content": "After passing the uphill section, the camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The traffic light at the intersection ahead is red.", "type": "attribute", "weight": 2 }, { "content": "The small truck and the van to its left gradually slow down and stop.", "type": "action", "weight": 3 }, { "content": "The camera slows down and moves forward, gradually approaching the rear of the small truck.", "type": "camera", "weight": 3 }, { "content": "The rear door of the van has the words \"OKANO Express\" on it.", "type": "attribute", "weight": 2 }, { "content": "The trunk of the small truck is filled with items.", "type": "attribute", "weight": 2 }, { "content": "A black car is passing through the intersection ahead.", "type": "action", "weight": 2 } ] } ], "n_events": 4, "n_elements": 25 }, { "index": "TUNA_0981", "video_path": "CoVLA/2022-11-10--10-34-30--132_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a wide road, following several vehicles from behind. In the center of the scene is a white sedan, in front of it is a large truck, and to the right of the scene is the front of a red flatbed truck. There are utility poles and some trees on both sides of the road. On the left side of the road, there is a section of white guardrail, behind which is a white billboard that reads \"BIG BOSS 1000.\"\nThe camera begins to pan forward, and the vehicles in the lanes start to move as well. The truck on the right side of the scene gradually becomes fully visible, with part of its flatbed covered by green tarpaulin. Soon, a red sedan appears in the lane to the left of the white sedan, moving forward. There is white text written on the far-left lane. Then, a blue car appears in the lane to the right of the white sedan, also moving forward. On the left side of the road, several billboards appear. A blue road sign is hanging above the road.\nThe red sedan overtakes the white sedan. The camera pans to the left front before resuming its forward movement, and the vehicle in front changes from the white sedan to the red car. On the left side of the road, a factory appears, with several cars parked at its entrance. On the right side of the road, there are still bushes.", "events": [ { "event": "At the beginning of the video, the camera focuses on a wide road, following several vehicles from behind. In the center of the scene is a white sedan, in front of it is a large truck, and to the right of the scene is the front of a red flatbed truck. There are utility poles and some trees on both sides of the road. 
On the left side of the road, there is a section of white guardrail, behind which is a white billboard that reads \"BIG BOSS 1000.\"", "visual_elements": [ { "content": "The camera focuses on a wide road.", "type": "camera", "weight": 3 }, { "content": "The perspective follows several moving vehicles from behind.", "type": "camera", "weight": 3 }, { "content": "In the center of the scene, there is a white sedan.", "type": "attribute", "weight": 2 }, { "content": "In front of the sedan, there is a large truck.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the scene, there is the front of a red flatbed truck.", "type": "attribute", "weight": 2 }, { "content": "There are utility poles and some trees on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "On the left side of the road, there is a section of white guardrail.", "type": "scene", "weight": 2 }, { "content": "Behind the guardrail, there is a white billboard with the text \"BIG BOSS 1000\" on it.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera begins to pan forward, and the vehicles in the lanes start to move as well. The truck on the right side of the scene gradually becomes fully visible, with part of its flatbed covered by green tarpaulin. Soon, a red sedan appears in the lane to the left of the white sedan, moving forward. There is white text written on the far-left lane. Then, a blue car appears in the lane to the right of the white sedan, also moving forward. On the left side of the road, several billboards appear. A blue road sign is hanging above the road.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "The vehicles in the lanes also start to move forward.", "type": "action", "weight": 3 }, { "content": "The truck on the right side of the scene gradually becomes fully visible.", "type": "attribute", "weight": 2 }, { "content": "Part of the flatbed of the truck is covered with green tarpaulin.", "type": "attribute", "weight": 2 }, { "content": "Shortly after, a red sedan appears in the lane to the left of the white sedan, moving forward.", "type": "action", "weight": 3 }, { "content": "On the far-left lane, there is white text with a warning message.", "type": "attribute", "weight": 2 }, { "content": "Shortly after, a blue car appears in the lane to the right of the white sedan, moving forward.", "type": "action", "weight": 3 }, { "content": "Some billboards appear on the left side of the road.", "type": "scene", "weight": 2 }, { "content": "A blue road sign is hanging above the road.", "type": "attribute", "weight": 2 } ] }, { "event": "The red sedan overtakes the white sedan. The camera pans to the left front before resuming its forward movement, and the vehicle in front changes from the white sedan to the red car. On the left side of the road, a factory appears, with several cars parked at its entrance. 
On the right side of the road, there are still bushes.", "visual_elements": [ { "content": "The red sedan overtakes the white sedan.", "type": "action", "weight": 2 }, { "content": "The camera pans to the left front and then resumes moving forward.", "type": "camera", "weight": 3 }, { "content": "The vehicle ahead changes from the white sedan to the red car.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road, a factory appears.", "type": "attribute", "weight": 2 }, { "content": "Several cars are parked in front of the factory.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the road, there are still bushes.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0982", "video_path": "CoVLA/2022-11-17--10-27-07--131_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on an intersection. In the lanes beyond the intersection, several cars are visible, while in the opposite lanes on the other side of the intersection, two rows of vehicles are waiting to pass. The traffic light at the intersection is green.\nThe camera starts to move forward, and the vehicles begin to move slowly. On the left side of the road, there is a red brick building with some billboards in front of it. On the right side of the road, there is a gray building, which has the Mercedes-Benz logo on it.\nThe camera continues to move forward. On the left side of the road, there is a restaurant, and in the open area in front of the restaurant, two cars are parked. Beyond the restaurant is a residential neighborhood, with red-leafed trees scattered in front of the buildings. On the right side of the road, there is a large shopping center, with a sign at the entrance displaying several brand names. A line of vehicles is slowly moving forward on the road in front of the mall.\nThe camera continues to move forward. In front of a building on the left side of the road, there is a sign that says \"SALE.\" On the right side, the shopping center leaves the scene, replaced by a high-rise building, with each floor having green glass. The traffic light at the upcoming intersection is green. After passing the intersection, billboards are seen lining both sides of the road. Ahead, there is a horizontal elevated bridge. At the end of the video, a yellow road sign reading \"Reduced Vehicles\" appears on the right side of the scene.", "events": [ { "event": "At the beginning of the video, the camera focuses on an intersection. In the lanes beyond the intersection, several cars are visible, while in the opposite lanes on the other side of the intersection, two rows of vehicles are waiting to pass. The traffic light at the intersection is green.", "visual_elements": [ { "content": "The camera focuses on a crossroads.", "type": "camera", "weight": 3 }, { "content": "Several cars are visible in the lanes beyond the intersection.", "type": "attribute", "weight": 2 }, { "content": "Two rows of vehicles are waiting to pass in the opposite lanes on the other side of the intersection.", "type": "attribute", "weight": 2 }, { "content": "The traffic light at the intersection is green.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera starts to move forward, and the vehicles begin to move slowly. 
On the left side of the road, there is a red brick building with some billboards in front of it. On the right side of the road, there is a gray building, which has the Mercedes-Benz logo on it.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "The vehicles begin to move forward slowly.", "type": "action", "weight": 3 }, { "content": "On the left side of the road, there is a red brick building.", "type": "attribute", "weight": 2 }, { "content": "There are some billboards in front of the building.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the road, there is a gray building.", "type": "attribute", "weight": 2 }, { "content": "The building has a Mercedes-Benz logo on it.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward. On the left side of the road, there is a restaurant, and in the open area in front of the restaurant, two cars are parked. Beyond the restaurant is a residential neighborhood, with red-leafed trees scattered in front of the buildings. On the right side of the road, there is a large shopping center, with a sign at the entrance displaying several brand names. A line of vehicles is slowly moving forward on the road in front of the mall.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "On the left side of the road, there is a restaurant.", "type": "attribute", "weight": 2 }, { "content": "Two cars are parked in the open space in front of the restaurant.", "type": "attribute", "weight": 1 }, { "content": "Beyond the restaurant is a residential area.", "type": "attribute", "weight": 2 }, { "content": "There are red-leafed trees in front of the residential buildings.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the road, there is a large shopping center.", "type": "attribute", "weight": 2 }, { "content": "The entrance sign displays multiple brand names.", "type": "attribute", "weight": 2 }, { "content": "A line of vehicles is slowly moving forward on the road in front of the mall.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward. In front of a building on the left side of the road, there is a sign that says \"SALE.\" On the right side, the shopping center leaves the scene, replaced by a high-rise building, with each floor having green glass. The traffic light at the upcoming intersection is green. After passing the intersection, billboards are seen lining both sides of the road. Ahead, there is a horizontal elevated bridge. 
At the end of the video, a yellow road sign reading \"Reduced Vehicles\" appears on the right side of the scene.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "There is a sign reading \"SALE\" in front of the building on the left side of the road.", "type": "attribute", "weight": 1 }, { "content": "The shopping center disappears from the right side of the scene, replaced by a high-rise building.", "type": "attribute", "weight": 2 }, { "content": "Each floor of the building has green-tinted glass.", "type": "attribute", "weight": 2 }, { "content": "The traffic light at the upcoming intersection is green.", "type": "attribute", "weight": 2 }, { "content": "After passing the intersection, billboards hang on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "There is also a horizontal overpass ahead.", "type": "scene", "weight": 2 }, { "content": "At the end of the video, a yellow road sign reading \"Reduced Vehicles\" appears on the right side of the scene.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 26 }, { "index": "TUNA_0983", "video_path": "CoVLA/2022-11-18--11-55-41--134_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "The video begins with the camera focused on a wide highway, with the perspective from the driver's point of view, extending forward. The sky in the distance is filled with clouds. The sky is slightly dark. On the left side of the road, there is a guardrail with orange reflective markers. On the right side of the road, there is a row of road posts with yellow and black signs. On the road to the right of the posts, there are white arrows painted, pointing toward the rear of the camera. The right side of this road has a row of orange and white road posts. The road surface has white lane markings. Tall streetlights are lined up on both sides of the entire road.\nThe camera pans forward, and the signs on the left side of the road gradually appear. First, there is a yellow warning sign indicating that the road ahead merges. Then, multiple blue and red traffic signs appear on the right side of the road, indicating vehicle directions and restrictions. The guardrail on the left side of the road turns into a row of orange and white road posts, with yellow road barriers crossing between the posts. Then, the road posts disappear from the scene, and the scene shows three lanes, with the posts on the right side turning into a guardrail.\nThe camera pans to the left front, shifting from the far-right lane to the middle lane. On the left side of the road, there is a speed limit sign that is continuously flashing, displaying a speed limit of 80 kilometers per hour. Below the number, there is also a yellow arrow sign indicating vehicles should move to the right. Vehicles are traveling in the opposite lanes.", "events": [ { "event": "The video begins with the camera focused on a wide highway, with the perspective from the driver's point of view, extending forward. The sky in the distance is filled with clouds. The sky is slightly dark. On the left side of the road, there is a guardrail with orange reflective markers. On the right side of the road, there is a row of road posts with yellow and black signs. On the road to the right of the posts, there are white arrows painted, pointing toward the rear of the camera. 
The right side of this road has a row of orange and white road posts. The road surface has white lane markings. Tall streetlights are lined up on both sides of the entire road.", "visual_elements": [ { "content": "The camera focuses on a wide highway.", "type": "camera", "weight": 3 }, { "content": "The perspective is from the driver's point of view, extending forward.", "type": "camera", "weight": 3 }, { "content": "The sky in the distance is filled with clouds.", "type": "scene", "weight": 2 }, { "content": "The sky is slightly dark.", "type": "scene", "weight": 2 }, { "content": "There is a guardrail on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "The guardrail has orange reflective markers.", "type": "attribute", "weight": 1 }, { "content": "On the right side of the road, there is a row of traffic cones.", "type": "attribute", "weight": 2 }, { "content": "The traffic cones have yellow and black markers on them.", "type": "attribute", "weight": 2 }, { "content": "On the road to the right of the traffic cones, white arrows are painted, pointing behind the camera.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the road is a row of orange and white traffic cones.", "type": "attribute", "weight": 2 }, { "content": "There are white lane markings on the road.", "type": "attribute", "weight": 2 }, { "content": "Tall streetlights line both sides of the road.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans forward, and the signs on the left side of the road gradually appear. First, there is a yellow warning sign indicating that the road ahead merges. Then, multiple blue and red traffic signs appear on the right side of the road, indicating vehicle directions and restrictions. The guardrail on the left side of the road turns into a row of orange and white road posts, with yellow road barriers crossing between the posts. Then, the road posts disappear from the scene, and the scene shows three lanes, with the posts on the right side turning into a guardrail.", "visual_elements": [ { "content": "The camera pans forward.", "type": "camera", "weight": 3 }, { "content": "On the left side of the road, a yellow warning sign appears, indicating that the road ahead merges.", "type": "attribute", "weight": 2 }, { "content": "Then, multiple blue and red traffic signs appear on the right side of the road, indicating directions and restrictions for vehicles.", "type": "attribute", "weight": 2 }, { "content": "The guardrail on the left side of the road turns into a row of orange and white traffic cones.", "type": "attribute", "weight": 2 }, { "content": "Yellow road barriers are interspersed between the traffic cones.", "type": "attribute", "weight": 2 }, { "content": "The traffic cones then disappear from the scene.", "type": "attribute", "weight": 1 }, { "content": "The entire scene now shows three lanes.", "type": "attribute", "weight": 2 }, { "content": "The traffic cones on the right side turn into a guardrail.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans to the left front, shifting from the far-right lane to the middle lane. On the left side of the road, there is a speed limit sign that is continuously flashing, displaying a speed limit of 80 kilometers per hour. Below the number, there is also a yellow arrow sign indicating vehicles should move to the right. 
Vehicles are traveling in the opposite lanes.", "visual_elements": [ { "content": "The camera pans to the left front, switching from the far right lane to the middle lane.", "type": "camera", "weight": 3 }, { "content": "On the left side of the road, a flashing speed limit sign shows a limit of 80 kilometers per hour.", "type": "attribute", "weight": 2 }, { "content": "Below the number, there is a yellow arrow sign indicating that vehicles should move to the right.", "type": "attribute", "weight": 1 }, { "content": "Vehicles are moving in the opposite lane.", "type": "action", "weight": 2 } ] } ], "n_events": 3, "n_elements": 24 }, { "index": "TUNA_0984", "video_path": "CoVLA/2022-11-18--11-55-41--30_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a wide highway. Tall transmission towers line both sides of the road, with wires crisscrossing between the towers, extending to the distant horizon. The sky is clear, with white clouds accumulating in the distance. Guardrails are present on both sides of the highway. Ahead on the left, part of the guardrail has been equipped with a mesh. Behind the left guardrail, there are trees and bushes.\nThe camera pans forward, and vehicles are seen driving on the highway ahead. A black sedan enters from the right side of the scene, traveling in the far-right lane. The black sedan moves quickly, then changes lanes to the middle lane. The transmission tower on the right side of the road leaves the scene. Then, two white sedans enter one by one from the right side of the scene into the far-right lane, just like the black sedan. The second white sedan signals left and changes lanes into the middle lane, following the black sedan. At this moment, another black sedan enters from the right side of the scene. In the opposite lane to the right of the guardrail, two trucks are driving side by side.", "events": [ { "event": "At the beginning of the video, the camera focuses on a wide highway. Tall transmission towers line both sides of the road, with wires crisscrossing between the towers, extending to the distant horizon. The sky is clear, with white clouds accumulating in the distance. Guardrails are present on both sides of the highway. Ahead on the left, part of the guardrail has been equipped with a mesh. Behind the left guardrail, there are trees and bushes.", "visual_elements": [ { "content": "The camera focuses on a wide highway.", "type": "camera", "weight": 3 }, { "content": "Tall transmission towers line both sides of the road.", "type": "scene", "weight": 2 }, { "content": "Wires crisscross between the towers.", "type": "scene", "weight": 2 }, { "content": "The sky is clear, with white clouds accumulating in the distance.", "type": "scene", "weight": 2 }, { "content": "Guardrails are present on both sides of the highway.", "type": "scene", "weight": 2 }, { "content": "Ahead on the left, part of the guardrail has been equipped with a mesh.", "type": "scene", "weight": 2 }, { "content": "Behind the left guardrail, there are trees and bushes.", "type": "scene", "weight": 2 } ] }, { "event": "The camera pans forward, and vehicles are seen driving on the highway ahead. A black sedan enters from the right side of the scene, traveling in the far-right lane. The black sedan moves quickly, then changes lanes to the middle lane. 
The transmission tower on the right side of the road leaves the scene. Then, two white sedans enter one by one from the right side of the scene into the far-right lane, just like the black sedan. The second white sedan signals left and changes lanes into the middle lane, following the black sedan. At this moment, another black sedan enters from the right side of the scene. In the opposite lane to the right of the guardrail, two trucks are driving side by side.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "Vehicles are driving on the highway ahead.", "type": "action", "weight": 2 }, { "content": "A black sedan enters from the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The black sedan is driving in the far-right lane.", "type": "attribute", "weight": 2 }, { "content": "The black sedan moves quickly, then changes lanes to the middle lane.", "type": "action", "weight": 3 }, { "content": "The transmission tower on the right side of the road leaves the scene.", "type": "scene", "weight": 2 }, { "content": "Then, two white sedans enter one by one from the right side of the scene into the far-right lane, just like the black sedan.", "type": "action", "weight": 3 }, { "content": "The second white sedan signals left and changes lanes into the middle lane, following the black sedan.", "type": "action", "weight": 3 }, { "content": "At this moment, another black sedan enters from the right side of the scene.", "type": "action", "weight": 3 }, { "content": "In the opposite lane to the right of the guardrail, two trucks are driving side by side.", "type": "action", "weight": 2 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0985", "video_path": "CoVLA/2022-11-21--13-20-46--25_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the start of the video, the camera faces the road ahead. In front is a white Toyota Probox, with its license plate covered. There are vehicles parked in both the lanes ahead of and to the right of the Toyota. The traffic light at the intersection ahead is red. On the left side of the road, there is a guardrail and some weeds, with an open area behind the guardrail where several cars are parked. There is also a 7-Eleven convenience store billboard in the area. In the distance, some trees and utility poles can be seen. There are some clouds in the sky, and the lighting is relatively bright. Above the road, there are blue and green traffic signs.\nThe traffic light ahead changes from red to green, and the vehicles begin to move slowly. The camera begins to slowly move forward. A truck's front end enters from the right side of the scene before leaving again. The truck on the right side is moving slower than the lane on the left. The right lane has a straight-ahead symbol, while the left lane has a symbol indicating both straight and left turn. After passing the intersection, the left side of the road is lined with trees, and the right side has some buildings. In the background, there are both horizontal and partial circular overpasses.\nThe Toyota ahead signals a left turn and turns left, with the camera immediately rotating to the left. The road after the turn is a two-lane road. The Toyota is in the left lane, and in the right lane, a row of vehicles is waiting to turn right. 
On the left side of the road, the sidewalk has a white guardrail installed. There are utility poles and wires on both sides of the road. On the left side of the road, there are dense trees.", "events": [ { "event": "At the start of the video, the camera faces the road ahead. In front is a white Toyota Probox, with its license plate covered. There are vehicles parked in both the lanes ahead of and to the right of the Toyota. The traffic light at the intersection ahead is red. On the left side of the road, there is a guardrail and some weeds, with an open area behind the guardrail where several cars are parked. There is also a 7-Eleven convenience store billboard in the area. In the distance, some trees and utility poles can be seen. There are some clouds in the sky, and the lighting is relatively bright. Above the road, there are blue and green traffic signs.", "visual_elements": [ { "content": "The camera is facing the road ahead.", "type": "camera", "weight": 3 }, { "content": "In front is a white Toyota Probox.", "type": "attribute", "weight": 2 }, { "content": "The license plate is covered.", "type": "attribute", "weight": 2 }, { "content": "There are vehicles parked in both the lanes ahead of and to the right of the Toyota.", "type": "attribute", "weight": 2 }, { "content": "The traffic light at the intersection ahead is red.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road, there is a guardrail and some weeds.", "type": "attribute", "weight": 2 }, { "content": "Behind the guardrail, there is an open area.", "type": "attribute", "weight": 2 }, { "content": "Several cars are parked in the open area.", "type": "attribute", "weight": 1 }, { "content": "There is also a 7-Eleven convenience store billboard in the area.", "type": "attribute", "weight": 2 }, { "content": "In the distance, some trees and utility poles can be seen.", "type": "scene", "weight": 2 }, { "content": "There are some clouds in the sky, and the lighting is relatively bright.", "type": "scene", "weight": 2 }, { "content": "Above the road, there are blue and green traffic signs.", "type": "scene", "weight": 2 } ] }, { "event": "The traffic light ahead changes from red to green, and the vehicles begin to move slowly. The camera begins to slowly move forward. A truck's front end enters from the right side of the scene before leaving again. The truck on the right side is moving slower than the lane on the left. The right lane has a straight-ahead symbol, while the left lane has a symbol indicating both straight and left turn. After passing the intersection, the left side of the road is lined with trees, and the right side has some buildings. 
In the background, there are both horizontal and partial circular overpasses.", "visual_elements": [ { "content": "The traffic light ahead changes from red to green.", "type": "attribute", "weight": 2 }, { "content": "The vehicles begin to move slowly.", "type": "action", "weight": 3 }, { "content": "The camera begins to slowly move forward.", "type": "camera", "weight": 3 }, { "content": "A truck’s front end enters from the right side of the scene before leaving again.", "type": "action", "weight": 3 }, { "content": "The truck on the right side is moving slower than the lane on the left.", "type": "attribute", "weight": 2 }, { "content": "The right lane has a straight-ahead symbol.", "type": "attribute", "weight": 2 }, { "content": "The left lane has a symbol indicating both straight and left turn.", "type": "attribute", "weight": 2 }, { "content": "After passing the intersection, the left side of the road is lined with trees, and the right side has some buildings.", "type": "scene", "weight": 2 }, { "content": "In the background, there are both horizontal and partial circular overpasses.", "type": "scene", "weight": 2 } ] }, { "event": "The Toyota ahead signals a left turn and turns left, with the camera immediately rotating to the left. The road after the turn is a two-lane road. The Toyota is in the left lane, and in the right lane, a row of vehicles is waiting to turn right. On the left side of the road, the sidewalk has a white guardrail installed. There are utility poles and wires on both sides of the road. On the left side of the road, there are dense trees.", "visual_elements": [ { "content": "The Toyota ahead signals a left turn and turns left.", "type": "action", "weight": 3 }, { "content": "The camera immediately rotates to the left.", "type": "camera", "weight": 3 }, { "content": "The road after the turn is a two-lane road.", "type": "attribute", "weight": 2 }, { "content": "The Toyota is in the left lane.", "type": "attribute", "weight": 2 }, { "content": "In the right lane, a row of vehicles is waiting to turn right.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road, the sidewalk has a white guardrail installed.", "type": "scene", "weight": 2 }, { "content": "There are utility poles and wires on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "On the left side of the road, there are dense trees.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 29 }, { "index": "TUNA_0986", "video_path": "CoVLA/2022-11-25--13-07-38--110_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "The video begins with the camera focused on a smooth rural road. On the left side of the road, there are utility poles and some shrubs, while the right side has a grassy hillside. In the distance, a black sedan can be seen driving on the road. The sky is clear, and the sunlight is bright.\nThe camera begins to pan forward. The black sedan drives ahead, and on the left side of the road, a fenced area with some equipment appears. As the camera pans forward, the area fades out of the scene. A lake appears on the left side of the road, with a utility pole by the lake displaying a speed limit sign of 40.\nThe camera continues to move forward. The lake disappears from the scene, and a blue house appears on the left side of the road. 
The black sedan ahead slows down to pass a white car coming from the opposite direction, and the white car then exits the scene from the right.\nThe camera continues to move forward. First, a dense shrubbery appears on the left side of the road, followed by a lake. Next to the lake is a greenhouse partially covered with blue plastic sheets. A short stretch of white guardrail is visible on the left side of the road. In the background, some houses can be seen on the left side of the road ahead.", "events": [ { "event": "The video begins with the camera focused on a smooth rural road. On the left side of the road, there are utility poles and some shrubs, while the right side has a grassy hillside. In the distance, a black sedan can be seen driving on the road. The sky is clear, and the sunlight is bright.", "visual_elements": [ { "content": "The camera is focused on a smooth rural road.", "type": "camera", "weight": 3 }, { "content": "On the left side of the road, there are utility poles and some shrubs.", "type": "scene", "weight": 2 }, { "content": "On the right side, there is a grassy hillside.", "type": "scene", "weight": 2 }, { "content": "In the distance, a black sedan can be seen.", "type": "attribute", "weight": 2 }, { "content": "The sky is clear, and the sunlight is bright.", "type": "scene", "weight": 2 } ] }, { "event": "The camera begins to pan forward. The black sedan drives ahead, and on the left side of the road, a fenced area with some equipment appears. As the camera pans forward, the area fades out of the scene. A lake appears on the left side of the road, with a utility pole by the lake displaying a speed limit sign of 40.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "The black sedan is driving ahead.", "type": "action", "weight": 2 }, { "content": "On the left side of the road, a fenced area appears.", "type": "attribute", "weight": 2 }, { "content": "The area contains some equipment.", "type": "attribute", "weight": 1 }, { "content": "The area moves out of the scene as the camera pans forward.", "type": "attribute", "weight": 2 }, { "content": "A lake appears on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "A utility pole by the lake has a speed limit sign showing 40.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues to move forward. The lake disappears from the scene, and a blue house appears on the left side of the road. The black sedan ahead slows down to pass a white car coming from the opposite direction, and the white car then exits the scene from the right.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "The lake disappears from the scene.", "type": "scene", "weight": 2 }, { "content": "On the left side of the road, there is a blue house.", "type": "scene", "weight": 2 }, { "content": "The black sedan ahead slows down to pass a white car coming from the opposite direction.", "type": "action", "weight": 3 }, { "content": "The white car then exits the scene from the right side.", "type": "action", "weight": 2 } ] }, { "event": "The camera continues to move forward. First, a dense shrubbery appears on the left side of the road, followed by a lake. Next to the lake is a greenhouse partially covered with blue plastic sheets. A short stretch of white guardrail is visible on the left side of the road. 
In the background, some houses can be seen on the left side of the road ahead.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "First, a dense shrubbery appears on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "Then, another lake appears on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "Next to the lake is a greenhouse partially covered with blue plastic sheets.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road, there is a short stretch of white guardrail.", "type": "attribute", "weight": 2 }, { "content": "In the background, some houses can be seen on the left side of the road ahead.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 23 }, { "index": "TUNA_0987", "video_path": "CoVLA/2022-12-02--15-31-01--16_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera is focused on a wide road, with the perspective taken from inside the car, facing forward. There are several vehicles ahead, with a red sedan in front of the camera. To the left front of the red sedan, there is a small yellow car. In front of the yellow car, there is a large white truck. To the left rear of the red sedan, there is a black car. On the opposite lane to the right of the red sedan, there are various types of vehicles. Above the left side of the road, there is a blue road sign. On both sides of the road, there are trees and utility poles, and the sky is filled with clouds.\nThe camera begins to pan forward, and the vehicles on the lanes start driving. The black car, yellow car, and white truck in the left lane gradually move out of the scene from the left side. A gas station appears on the left side of the road, and the display screen in front of the gas station shows the fuel price. In front of the gas station, three people are riding bicycles toward the camera.\nThe red sedan ahead signals a left turn and changes lanes, continuing forward in the left lane. The camera then pans to the left front, following behind the red sedan. On the left side of the road, there is a field. The red sedan gradually overtakes the white truck in the right lane that was beside it. There is a gantry on the road, with cameras installed on it. On the left side of the road ahead, there are some houses, and on the right side of the road, there is a large red billboard with the letter \"M\" on it.", "events": [ { "event": "At the beginning of the video, the camera is focused on a wide road, with the perspective taken from inside the car, facing forward. There are several vehicles ahead, with a red sedan in front of the camera. To the left front of the red sedan, there is a small yellow car. In front of the yellow car, there is a large white truck. To the left rear of the red sedan, there is a black car. On the opposite lane to the right of the red sedan, there are various types of vehicles. Above the left side of the road, there is a blue road sign. 
On both sides of the road, there are trees and utility poles, and the sky is filled with clouds.", "visual_elements": [ { "content": "The camera is focused on a wide road.", "type": "camera", "weight": 3 }, { "content": "The view is from inside the car, facing forward.", "type": "camera", "weight": 3 }, { "content": "In front of the camera, there is a red sedan.", "type": "attribute", "weight": 2 }, { "content": "To the left front of the red sedan, there is a yellow compact car.", "type": "attribute", "weight": 2 }, { "content": "In front of the yellow car, there is a white large truck.", "type": "attribute", "weight": 2 }, { "content": "To the left rear of the red sedan, there is a black car.", "type": "attribute", "weight": 2 }, { "content": "On the opposite lane to the right of the red sedan, there are various types of vehicles.", "type": "attribute", "weight": 2 }, { "content": "Above the left side of the road, there is a blue road sign.", "type": "attribute", "weight": 2 }, { "content": "There are some trees and utility poles on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "The sky is filled with clouds.", "type": "scene", "weight": 2 } ] }, { "event": "The camera begins to pan forward, and the vehicles on the lanes start driving. The black car, yellow car, and white truck in the left lane gradually move out of the scene from the left side. A gas station appears on the left side of the road, and the display screen in front of the gas station shows the fuel price. In front of the gas station, three people are riding bicycles toward the camera.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "Vehicles on the lanes start moving.", "type": "action", "weight": 3 }, { "content": "The black car, yellow car, and white truck in the left lane gradually move out of the scene on the left.", "type": "action", "weight": 2 }, { "content": "A gas station appears on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "The display screen in front of the gas station shows the fuel prices.", "type": "attribute", "weight": 1 }, { "content": "On the road in front of the gas station, three people on bicycles are riding towards the camera.", "type": "action", "weight": 2 } ] }, { "event": "The red sedan ahead signals a left turn and changes lanes, continuing forward in the left lane. The camera then pans to the left front, following behind the red sedan. On the left side of the road, there is a field. The red sedan gradually overtakes the white truck in the right lane that was beside it. There is a gantry on the road, with cameras installed on it. 
On the left side of the road ahead, there are some houses, and on the right side of the road, there is a large red billboard with the letter \"M\" on it.", "visual_elements": [ { "content": "The red sedan ahead signals a left turn and changes lanes, continuing to move forward in the left lane.", "type": "action", "weight": 3 }, { "content": "The camera also pans to the left front, following behind the red sedan.", "type": "camera", "weight": 3 }, { "content": "On the left side of the road, there is a field.", "type": "scene", "weight": 2 }, { "content": "The red sedan gradually overtakes the white truck in the right lane.", "type": "action", "weight": 3 }, { "content": "There is a gantry on the road.", "type": "attribute", "weight": 2 }, { "content": "The gantry has cameras installed on it.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road ahead, there are some houses.", "type": "attribute", "weight": 2 }, { "content": "On the right side of the road, there is a large red billboard with the letter \"M\" on it.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 24 }, { "index": "TUNA_0988", "video_path": "CoVLA/2022-12-02--16-20-35--27_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "In the video, the scene shows a nighttime traffic situation. The camera is focused on an orange Volvo sedan, with its license plate blurred. The brake lights and tail lights of the Volvo are on, indicating that the vehicle is stationary. The right turn signal of the Volvo is flashing continuously. In front of the Volvo, there is a large cement mixer truck. A flatbed truck is parked in the left lane, loaded with two white cars. Vehicles are continuously moving in the right lane. The traffic signal at the intersection ahead shows a red light.\nThe traffic signal ahead turns green. Vehicles in the left lane begin to slowly move forward, while the cement mixer truck and the Volvo remain stationary. Vehicles in both the left and right lanes begin to move freely. The Volvo in front starts to move forward slightly, then brakes and comes to a stop. The camera follows by moving forward and then halting. On the right side of the intersection, there are several buildings, one of which has a glowing sign shaped like a leaf.", "events": [ { "event": "In the video, the scene shows a nighttime traffic situation. The camera is focused on an orange Volvo sedan, with its license plate blurred. The brake lights and tail lights of the Volvo are on, indicating that the vehicle is stationary. The right turn signal of the Volvo is flashing continuously. In front of the Volvo, there is a large cement mixer truck. A flatbed truck is parked in the left lane, loaded with two white cars. Vehicles are continuously moving in the right lane. 
The traffic signal at the intersection ahead shows a red light.", "visual_elements": [ { "content": "The scene shows a nighttime traffic situation.", "type": "attribute", "weight": 3 }, { "content": "In front of the camera is an orange Volvo sedan.", "type": "attribute", "weight": 3 }, { "content": "The license plate is blurred.", "type": "attribute", "weight": 2 }, { "content": "The tail lights and brake lights of the Volvo are on, indicating that the vehicle is stationary.", "type": "attribute", "weight": 2 }, { "content": "The right turn signal of the Volvo is continuously flashing.", "type": "attribute", "weight": 2 }, { "content": "In front of the Volvo, there is a large cement mixer truck.", "type": "attribute", "weight": 2 }, { "content": "A flatbed truck is parked in the left lane.", "type": "attribute", "weight": 2 }, { "content": "The flatbed truck is loaded with two white cars.", "type": "attribute", "weight": 2 }, { "content": "Vehicles are continuously moving in the right lane.", "type": "action", "weight": 2 }, { "content": "The traffic signal at the intersection ahead shows a red light.", "type": "attribute", "weight": 2 } ] }, { "event": "The traffic signal ahead turns green. Vehicles in the left lane begin to slowly move forward, while the cement mixer truck and the Volvo remain stationary. Vehicles in both the left and right lanes begin to move freely. The Volvo in front starts to move forward slightly, then brakes and comes to a stop. The camera follows by moving forward and then halting. On the right side of the intersection, there are several buildings, one of which has a glowing sign shaped like a leaf.", "visual_elements": [ { "content": "The traffic signal ahead turns green.", "type": "attribute", "weight": 3 }, { "content": "Vehicles in the left lane begin to slowly move forward.", "type": "action", "weight": 3 }, { "content": "The cement mixer truck and the Volvo in front remain stationary.", "type": "attribute", "weight": 2 }, { "content": "Vehicles in both the left and right lanes begin to move freely.", "type": "attribute", "weight": 2 }, { "content": "The Volvo in front starts to move forward slightly, then brakes and comes to a stop.", "type": "action", "weight": 3 }, { "content": "The camera follows by moving forward and then halting.", "type": "action", "weight": 3 }, { "content": "On the right side of the intersection, there are several buildings, one of which has a glowing sign shaped like a leaf.", "type": "scene", "weight": 2 } ] } ], "n_events": 2, "n_elements": 17 }, { "index": "TUNA_0989", "video_path": "CoVLA/2022-12-05--10-17-04--48_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the start of the video, the camera faces an intersection with a gloomy sky. On the left side of the road, there is a green grass slope, while the right side features some buildings and utility poles. The traffic light ahead is green. In the scene, a white truck is seen at the opposite intersection with its left turn signal on. White diverging lines are painted on the road at the intersection.\nThe camera begins to pan forward, and the truck gradually drives out of the scene from the right side. A white guardrail appears on the right side of the road. The buildings behind the guardrail are positioned at a lower elevation than the road. 
On the left side of the road, some traffic signs can be seen, warning drivers to pay attention to the road conditions ahead.\nThe camera continues panning forward, and a fork in the road appears ahead. The left side still has a grass slope, while the fork on the right leads to two downhill roads in opposite directions, allowing vehicles to reach the buildings below. Four cars pass by on the opposite lane, with each leaving the scene from the right side. Ahead, the road curves, and on the left side of the slope, there is a blue road sign. In the distance, a white multi-story building is visible.", "events": [ { "event": "At the start of the video, the camera faces an intersection with a gloomy sky. On the left side of the road, there is a green grass slope, while the right side features some buildings and utility poles. The traffic light ahead is green. In the scene, a white truck is seen at the opposite intersection with its left turn signal on. White diverging lines are painted on the road at the intersection.", "visual_elements": [ { "content": "The camera faces an intersection.", "type": "camera", "weight": 3 }, { "content": "The sky is overcast.", "type": "scene", "weight": 2 }, { "content": "On the left side of the road, there is a green grass slope.", "type": "scene", "weight": 2 }, { "content": "On the right side, there are some buildings and utility poles.", "type": "scene", "weight": 1 }, { "content": "The traffic light ahead is green.", "type": "attribute", "weight": 2 }, { "content": "In the scene, a white truck can be seen at the opposite intersection with its left turn signal on.", "type": "attribute", "weight": 2 }, { "content": "White diverging lines are painted on the road at the intersection.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera begins to pan forward, and the truck gradually drives out of the scene from the right side. A white guardrail appears on the right side of the road. The buildings behind the guardrail are positioned at a lower elevation than the road. On the left side of the road, some traffic signs can be seen, warning drivers to pay attention to the road conditions ahead.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "The truck gradually drives out of the scene from the right side.", "type": "action", "weight": 3 }, { "content": "A white guardrail appears on the right side of the road.", "type": "scene", "weight": 2 }, { "content": "The buildings behind the guardrail are positioned at a lower elevation than the road.", "type": "scene", "weight": 2 }, { "content": "On the left side of the road, some traffic signs can be seen, warning drivers to pay attention to the road conditions ahead.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues panning forward, and a fork in the road appears ahead. The left side still has a grass slope, while the fork on the right leads to two downhill roads in opposite directions, allowing vehicles to reach the buildings below. Four cars pass by on the opposite lane, with each leaving the scene from the right side. Ahead, the road curves, and on the left side of the slope, there is a blue road sign. 
In the distance, a white multi-story building is visible.", "visual_elements": [ { "content": "The camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "Ahead, a fork in the road appears.", "type": "attribute", "weight": 2 }, { "content": "The left side of the road still has a grass slope.", "type": "attribute", "weight": 2 }, { "content": "The fork on the right leads to two opposite downhill road segments, allowing vehicles to reach the buildings below.", "type": "attribute", "weight": 1 }, { "content": "Four cars pass by on the opposite lane, with each leaving the scene from the right side.", "type": "action", "weight": 3 }, { "content": "Ahead, the road curves.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the slope, there is a blue road sign.", "type": "scene", "weight": 2 }, { "content": "In the distance, a white multi-story building is visible.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0990", "video_path": "CoVLA/2022-12-06--15-03-31--13_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "The video begins with the camera facing an urban highway under a dim sky. In the center of the scene is a white small van, its license plate obscured. The vehicle is driving in the far left lane, with a white truck in front of it on the right, followed by a black sedan behind the truck. On the left side of the road, there is a tall soundproof wall, and on the right, a guardrail separates the two-way lanes. To the right of the guardrail, there is a small truck loaded with goods. In the background, a high-rise building is visible on the left side of the road.\nThe camera begins to pan forward, and the vehicles on the lanes also begin to move ahead. Above the road, a traffic gantry appears, with several cameras and a sign reading \"ETC.\" To the left of the gantry, there is an electronic display showing red text warning drivers to watch their speed and avoid tailgating. On the ground, yellow speed limit signs are painted, indicating a speed limit of 40 kilometers per hour. The road is marked with orange patches to serve as a warning.\nThe camera continues panning forward. Ahead, the road forks, adding more lanes, and a toll station appears. Above each lane at the toll station, there are indicator lights in different colors, continuously flashing. To the left of the toll station entrance, there is a green sign showing the locations of the restroom and parking lot.\nThe camera continues panning forward. The white van enters the second toll lane from the right, while the truck and black sedan enter the first lane on the right. The display screens above the two lanes on the right show \"ETC Only,\" and the ground is marked with blue and white symbols. The vehicles smoothly pass through the toll station and enter a three-lane road. The white van is in the middle lane, with a truck ahead of it, a black sedan in the far left lane, and a silver van in the right lane. In the background, high-rise buildings are visible on both sides of the road.", "events": [ { "event": "The video begins with the camera facing an urban highway under a dim sky. In the center of the scene is a white small van, its license plate obscured. The vehicle is driving in the far left lane, with a white truck in front of it on the right, followed by a black sedan behind the truck. 
On the left side of the road, there is a tall soundproof wall, and on the right, a guardrail separates the two-way lanes. To the right of the guardrail, there is a small truck loaded with goods. In the background, a high-rise building is visible on the left side of the road.", "visual_elements": [ { "content": "The camera focuses on an urban highway.", "type": "camera", "weight": 3 }, { "content": "The sky is dim.", "type": "scene", "weight": 2 }, { "content": "In the center of the scene, there is a white small van.", "type": "attribute", "weight": 3 }, { "content": "The license plate is obscured.", "type": "attribute", "weight": 1 }, { "content": "The vehicle is driving in the far left lane.", "type": "attribute", "weight": 2 }, { "content": "To the right front of the white van, there is a white truck.", "type": "attribute", "weight": 2 }, { "content": "Behind the truck, there is a black sedan.", "type": "attribute", "weight": 2 }, { "content": "On the left side of the road, there is a tall soundproof wall.", "type": "scene", "weight": 2 }, { "content": "On the right side, there is a guardrail separating the two-way lanes.", "type": "scene", "weight": 2 }, { "content": "To the right of the guardrail, there is a small truck loaded with goods.", "type": "attribute", "weight": 1 }, { "content": "In the background, a high-rise building is visible on the left side of the road.", "type": "scene", "weight": 2 } ] }, { "event": "The camera begins to pan forward, and the vehicles on the lanes also begin to move ahead. Above the road, a traffic gantry appears, with several cameras and a sign reading \"ETC.\" To the left of the gantry, there is an electronic display showing red text warning drivers to watch their speed and avoid tailgating. On the ground, yellow speed limit signs are painted, indicating a speed limit of 40 kilometers per hour. The road is marked with orange patches to serve as a warning.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "The vehicles on the lanes also begin to move forward.", "type": "action", "weight": 3 }, { "content": "Above the road, a traffic gantry appears.", "type": "attribute", "weight": 2 }, { "content": "On the gantry, there are several cameras and a sign reading \"ETC.\"", "type": "attribute", "weight": 2 }, { "content": "To the left of the gantry, there is an electronic display screen.", "type": "attribute", "weight": 2 }, { "content": "The display screen shows red text warning drivers to watch their speed and avoid tailgating.", "type": "attribute", "weight": 2 }, { "content": "On the ground, yellow speed limit signs are painted, showing a speed limit of 40 kilometers per hour.", "type": "attribute", "weight": 2 }, { "content": "Orange patches are painted on the road, serving as a warning.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues panning forward. Ahead, the road forks, adding more lanes, and a toll station appears. Above each lane at the toll station, there are indicator lights in different colors, continuously flashing. 
To the left of the toll station entrance, there is a green sign showing the locations of the restroom and parking lot.", "visual_elements": [ { "content": "The camera continues to move forward.", "type": "camera", "weight": 3 }, { "content": "Ahead, the road forks, causing more lanes to appear.", "type": "attribute", "weight": 3 }, { "content": "A toll station appears ahead.", "type": "attribute", "weight": 3 }, { "content": "Above each lane at the toll station, there are indicator lights of different colors.", "type": "attribute", "weight": 2 }, { "content": "The indicator lights are continuously flashing.", "type": "attribute", "weight": 2 }, { "content": "To the left of the toll station entrance, there is a green sign showing the locations of restrooms and parking lots.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues panning forward. The white van enters the second toll lane from the right, while the truck and black sedan enter the first lane on the right. The display screens above the two lanes on the right show \"ETC Only,\" and the ground is marked with blue and white symbols. The vehicles smoothly pass through the toll station and enter a three-lane road. The white van is in the middle lane, with a truck ahead of it, a black sedan in the far left lane, and a silver van in the right lane. In the background, high-rise buildings are visible on both sides of the road.", "visual_elements": [ { "content": "The camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "The white van enters the second toll lane from the right.", "type": "action", "weight": 3 }, { "content": "The truck and black sedan enter the first lane on the right.", "type": "action", "weight": 2 }, { "content": "Above the two lanes on the right, the display screens read \"ETC Only.\"", "type": "attribute", "weight": 1 }, { "content": "Blue and white symbols are painted on the ground.", "type": "attribute", "weight": 1 }, { "content": "The vehicles smoothly pass through the toll station and enter a three-lane road.", "type": "action", "weight": 3 }, { "content": "The white van is in the middle lane, with a truck ahead.", "type": "attribute", "weight": 2 }, { "content": "In the far left lane, there is a black sedan.", "type": "attribute", "weight": 2 }, { "content": "In the right lane, there is a silver van.", "type": "attribute", "weight": 2 }, { "content": "In the background, high-rise buildings are visible on both sides of the road.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 35 }, { "index": "TUNA_0991", "video_path": "CoVLA/2022-12-06--15-03-31--65_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a three-lane highway. It's slightly dark, and the street lights have already turned on. Several cars and trucks can be seen driving in the scene. In the front left of the shot, specifically on the leftmost lane, there's a small delivery truck with a green tarp covering its rear. There's a green highway sign on the left side of the road ahead, showing \"Ryuzan Noda\" and \"Exit 500m\".\nThe camera starts panning forward. A red sedan enters the scene from the right side, with its license plate blurred. The red sedan drives on the rightmost lane. The small delivery truck on the left lane exits the scene from the left side. 
Other vehicles on the road continue driving forward. In the background, distant buildings and a tall chimney can be seen, with a small amount of smoke at the top of the chimney.\nThe camera continues panning forward. A white delivery truck on the left lane and a silver car in front of it exit the scene from the left side one by one. There are some signs and shows on the left side of the road. A traffic gantry appears above the road ahead, with a green sign hanging from it, showing \"Ryuzan Noda\" and \"Exit 1-2\". Vehicles in the leftmost lane turn on their left turn signals. The distant sky shows a pale blue and pink sunset.\nThe camera continues panning forward and passes through the gantry. A diverging intersection appears ahead, with an exit sign hanging at the intersection, and yellow warning lights flashing continuously. A car on the left road exits the scene from the left side, and a white delivery truck in front of it slows down and enters the leftmost diverging road. There are dense trees on the right side of the road, and some buildings and trees on the left.", "events": [ { "event": "At the beginning of the video, the camera focuses on a three-lane highway. It's slightly dark, and the street lights have already turned on. Several cars and trucks can be seen driving in the scene. In the front left of the shot, specifically on the leftmost lane, there's a small delivery truck with a green tarp covering its rear. There's a green highway sign on the left side of the road ahead, showing \"Ryuzan Noda\" and \"Exit 500m\".", "visual_elements": [ { "content": "The camera focuses on a three-lane highway.", "type": "camera", "weight": 3 }, { "content": "The sky is slightly dark.", "type": "scene", "weight": 2 }, { "content": "The street lights have already turned on.", "type": "scene", "weight": 2 }, { "content": "Several cars and trucks can be seen driving in the scene.", "type": "attribute", "weight": 3 }, { "content": "In the front left of the shot, specifically on the leftmost lane, there is a small delivery truck.", "type": "attribute", "weight": 2 }, { "content": "The rear of the truck is covered with green tarp.", "type": "attribute", "weight": 2 }, { "content": "There's a green highway sign on the left side of the road ahead, showing \"Ryuzan Noda\" and \"Exit 500m\".", "type": "attribute", "weight": 2 } ] }, { "event": "The camera starts panning forward. A red sedan enters the scene from the right side, with its license plate blurred. The red sedan drives on the rightmost lane. The small delivery truck on the left lane exits the scene from the left side. Other vehicles on the road continue driving forward. 
In the background, distant buildings and a tall chimney can be seen, with a small amount of smoke at the top of the chimney.", "visual_elements": [ { "content": "The camera starts panning forward.", "type": "camera", "weight": 3 }, { "content": "A red sedan enters the scene from the right side.", "type": "action", "weight": 3 }, { "content": "The license plate is blurred.", "type": "attribute", "weight": 2 }, { "content": "The red sedan drives on the rightmost lane.", "type": "attribute", "weight": 2 }, { "content": "The small delivery truck on the left lane exits the scene from the left.", "type": "action", "weight": 3 }, { "content": "Other vehicles on the road continue moving forward.", "type": "action", "weight": 2 }, { "content": "In the background, distant buildings and a tall chimney can be seen, with a small amount of smoke at the top of the chimney.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues panning forward. A white delivery truck on the left lane and a silver car in front of it exit the scene from the left side one by one. There are some signs and shows on the left side of the road. A traffic gantry appears above the road ahead, with a green sign hanging from it, showing \"Ryuzan Noda\" and \"Exit 1-2\". Vehicles in the leftmost lane turn on their left turn signals. The distant sky shows a pale blue and pink sunset.", "visual_elements": [ { "content": "The camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "A white delivery truck on the left lane and a silver car ahead of it exit the scene from the left one by one.", "type": "action", "weight": 3 }, { "content": "There are some signs and shows on the left side of the road.", "type": "attribute", "weight": 1 }, { "content": "A traffic gantry appears above the road ahead.", "type": "attribute", "weight": 2 }, { "content": "A green sign hanging on the gantry shows \"Ryuzan Noda\" and \"Exit 1-2\".", "type": "attribute", "weight": 2 }, { "content": "The vehicles in the leftmost lane turn on their left turn signals.", "type": "action", "weight": 2 }, { "content": "The distant sky shows a pale blue and pink sunset.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues panning forward and passes through the gantry. A diverging intersection appears ahead, with an exit sign hanging at the intersection, and yellow warning lights flashing continuously. A car on the left road exits the scene from the left side, and a white delivery truck in front of it slows down and enters the leftmost diverging road. 
There are dense trees on the right side of the road, and some buildings and trees on the left.", "visual_elements": [ { "content": "The camera continues panning forward and passes through the gantry.", "type": "camera", "weight": 3 }, { "content": "A diverging intersection appears ahead.", "type": "attribute", "weight": 3 }, { "content": "An exit sign hangs at the intersection.", "type": "attribute", "weight": 2 }, { "content": "Yellow warning lights are flashing continuously.", "type": "action", "weight": 2 }, { "content": "A car on the left road exits the scene from the left side.", "type": "action", "weight": 3 }, { "content": "A white delivery truck in the front left of the shot slows down and enters the leftmost diverging road.", "type": "action", "weight": 3 }, { "content": "There are dense trees on the right side of the road, and some buildings and trees on the left.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 28 }, { "index": "TUNA_0992", "video_path": "CoVLA/2022-12-14--11-33-48--110_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a highway, from the perspective of a driver. A blue sedan can be seen driving on the right lane, with a few cars in the distance ahead. There are guardrails on both sides of the road. The trees on the left side of the road are barren, with almost no leaves on the branches, showing the characteristics of autumn and winter. The trees on the right side of the road are dense, showing green and yellow leaves. The sky is clear, with blue skies, white clouds, and bright sunshine.\nThe camera starts panning forward. The vehicles on the lanes drive forward. The blue sedan gradually moves away from the camera. The road ahead curves slightly to the right. The hills in the background gradually come into view. The blue sedan passes the curve and enters the straight road. At this moment, a black sedan enters from the right side of the scene. There's a traffic sign above the road ahead, showing exit information. The sign reads \"Sano 900m\", \"Mibu Nishiyama 23km\", and \"Otani 43km\", etc. There are also signs for no passing, speed limits, and parking below the sign.\nThe camera continues panning forward. Some traffic signs on the left side of the road pass by the left side of the scene one after another. The black sedan also gradually moves away from the camera. A white delivery truck can be seen in the distance, driving on the left lane. The trees on the left side of the road are still sparse, with some fallen leaves on the ground. A left turn appears ahead on the road.", "events": [ { "event": "At the beginning of the video, the camera focuses on a highway, from the perspective of a driver. A blue sedan can be seen driving on the right lane, with a few cars in the distance ahead. There are guardrails on both sides of the road. The trees on the left side of the road are barren, with almost no leaves on the branches, showing the characteristics of autumn and winter. The trees on the right side of the road are dense, showing green and yellow leaves. 
The sky is clear, with blue skies, white clouds, and bright sunshine.", "visual_elements": [ { "content": "The camera focuses on a highway.", "type": "camera", "weight": 3 }, { "content": "The perspective is from the driver's point of view.", "type": "camera", "weight": 3 }, { "content": "A blue sedan can be seen driving on the right lane, with a few cars in the distance ahead.", "type": "attribute", "weight": 3 }, { "content": "There are guardrails on both sides of the road.", "type": "attribute", "weight": 2 }, { "content": "The trees on the left side of the road are bare, with almost no leaves on the branches.", "type": "scene", "weight": 2 }, { "content": "The trees on the right side of the road are dense, showing green and yellow leaves.", "type": "scene", "weight": 2 }, { "content": "The sky is clear, with blue skies, white clouds, and bright sunshine.", "type": "scene", "weight": 2 } ] }, { "event": "The camera starts panning forward. The vehicles on the lanes drive forward. The blue sedan gradually moves away from the camera. The road ahead curves slightly to the right. The hills in the background gradually come into view. The blue sedan passes the curve and enters the straight road. At this moment, a black sedan enters from the right side of the scene. There's a traffic sign above the road ahead, showing exit information. The sign reads \"Sano 900m\", \"Mibu Nishiyama 23km\", and \"Otani 43km\", etc. There are also signs for no passing, speed limits, and parking below the sign.", "visual_elements": [ { "content": "The camera starts panning forward.", "type": "camera", "weight": 3 }, { "content": "The vehicles on the lanes drive forward.", "type": "action", "weight": 3 }, { "content": "The blue sedan gradually moves away from the camera.", "type": "attribute", "weight": 2 }, { "content": "The road ahead curves slightly to the right.", "type": "attribute", "weight": 3 }, { "content": "The hills in the background gradually come into view.", "type": "scene", "weight": 2 }, { "content": "The blue sedan passes the curve and enters the straight road.", "type": "action", "weight": 2 }, { "content": "At this moment, a black sedan enters from the right side of the scene.", "type": "action", "weight": 3 }, { "content": "There's a traffic sign above the road ahead, showing exit information.", "type": "attribute", "weight": 2 }, { "content": "The sign reads \"Sano 900m\", \"Mibu Nishiyama 23km\", and \"Otani 43km\", etc.", "type": "attribute", "weight": 2 }, { "content": "Below the sign, there are signs for no passing, speed limits, and parking.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues panning forward. Some traffic signs on the left side of the road pass by the left side of the scene one after another. The black sedan also gradually moves away from the camera. A white delivery truck can be seen in the distance, driving on the left lane. The trees on the left side of the road are still sparse, with some fallen leaves on the ground. 
A left turn appears ahead on the road.", "visual_elements": [ { "content": "The camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "Some traffic signs on the left side of the road pass by the left side of the scene one after another.", "type": "attribute", "weight": 2 }, { "content": "The black sedan also gradually moves away from the camera.", "type": "attribute", "weight": 2 }, { "content": "A white delivery truck can be seen in the distance, driving on the left lane.", "type": "attribute", "weight": 2 }, { "content": "The trees on the left side of the road are still sparse, with some fallen leaves on the ground.", "type": "scene", "weight": 2 }, { "content": "Ahead, there is a left turn on the road.", "type": "attribute", "weight": 3 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0993", "video_path": "CoVLA/2022-12-14--11-33-48--73_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a straight country road, divided into two lanes by solid yellow lines. There are utility poles and houses on both sides of the road. There's a row of chain-link fences on the left side of the road, and white guardrails on the right. The sky is clear and sunny. There's a gray truck on the right lane, followed by a white car.\nThe camera starts panning forward. The truck and the white car on the right lane exit the scene from the right side one by one. The guardrails on both sides of the road transition into white ones. There are some graffiti and signs on the right guardrail. Behind the guardrails are some trees and shrubs. Some of the buildings on both sides of the road are low-rise houses, and some are taller industrial buildings. A speed limit sign of 40 is painted on the lane. There's a white truck and a silver pickup truck behind it on the right lane.\nThe camera continues panning forward. There's no graffiti or signs on the guardrail on the left side of the road. The right guardrail disappears, and the buildings on the right side of the road directly border the road. The vehicles on the right lane gradually exit the scene from the left side. Open fields and grasslands, interspersed with buildings, are on both sides of the road. When the roadsides are fields and grasslands, there will be fences or guardrails on both sides of the road.", "events": [ { "event": "At the beginning of the video, the camera focuses on a straight country road, divided into two lanes by solid yellow lines. There are utility poles and houses on both sides of the road. There's a row of chain-link fences on the left side of the road, and white guardrails on the right. The sky is clear and sunny. 
There's a gray truck on the right lane, followed by a white car.", "visual_elements": [ { "content": "The camera focuses on a straight country road.", "type": "camera", "weight": 3 }, { "content": "The road is divided into two lanes by yellow solid lines.", "type": "attribute", "weight": 3 }, { "content": "There are utility poles and houses on both sides of the road.", "type": "attribute", "weight": 2 }, { "content": "There's a row of chain-link fences on the left side of the road, and white guardrails on the right.", "type": "attribute", "weight": 2 }, { "content": "The sky is clear and sunny.", "type": "scene", "weight": 2 }, { "content": "There is a gray truck on the right lane.", "type": "attribute", "weight": 2 }, { "content": "Behind the gray truck, there is a white car.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera starts panning forward. The truck and the white car on the right lane exit the scene from the right side one by one. The guardrails on both sides of the road transition into white ones. There are some graffiti and signs on the right guardrail. Behind the guardrails are some trees and shrubs. Some of the buildings on both sides of the road are low-rise houses, and some are taller industrial buildings. A speed limit sign of 40 is painted on the lane. There's a white truck and a silver pickup truck behind it on the right lane.", "visual_elements": [ { "content": "The camera starts panning forward.", "type": "camera", "weight": 3 }, { "content": "The truck and the white car on the right lane exit the scene from the right side one by one.", "type": "action", "weight": 3 }, { "content": "The guardrails on both sides of the road transition into white ones.", "type": "attribute", "weight": 2 }, { "content": "There are some graffiti and signs on the right guardrails.", "type": "attribute", "weight": 2 }, { "content": "Behind the guardrails are some trees and shrubs.", "type": "attribute", "weight": 2 }, { "content": "Some of the buildings on both sides of the road are low-rise bungalows, and some are taller industrial buildings.", "type": "attribute", "weight": 2 }, { "content": "A speed limit sign of 40 is painted on the lane.", "type": "attribute", "weight": 2 }, { "content": "There's a white truck and a silver pickup truck behind it on the right lane.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues panning forward. There's no graffiti or signs on the guardrail on the left side of the road. The right guardrail disappears, and the buildings on the right side of the road directly border the road. The vehicles on the right lane gradually exit the scene from the left side. Open fields and grasslands, interspersed with buildings, are on both sides of the road. 
When the roadsides are fields and grasslands, there will be fences or guardrails on both sides of the road.", "visual_elements": [ { "content": "The camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "There are no graffiti or signs on the guardrails on the left side of the road.", "type": "attribute", "weight": 1 }, { "content": "The right guardrails disappear, and the buildings on the right side of the road directly border the road.", "type": "attribute", "weight": 2 }, { "content": "The vehicles on the right lane gradually exit the scene from the left side.", "type": "attribute", "weight": 3 }, { "content": "Open fields and grasslands, interspersed with buildings, are on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "When the roadsides are fields and grasslands, there will be fences or guardrails on both sides of the road.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 21 }, { "index": "TUNA_0994", "video_path": "CoVLA/2022-12-14--13-39-11--21_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on an intersection, with a blue sky and mountains in the distance as the background. The traffic lights at the intersection are green, and there are three cars waiting to pass in the rightmost lane of the opposite intersection. Some buildings and billboards can be seen on both the left and right sides of the intersection, with Japanese text and arrows on the billboards.\nThe camera begins to pan forward, and a black sedan drives straight from the opposite intersection, exiting the scene from the right side. A silver car behind the black sedan turns left with its left turn signal on and exits the scene. The camera slowly pans to the right, and the silver van behind the silver car drives straight, exiting the scene from the right side. Subsequently, the camera accelerates and pans to the right, and a broad straight road appears in the scene. Many vehicles are queuing to pass in the rightmost lane.\nThe camera pans forward, and a silver car advances ahead. There are some shops, restaurants, and road signs on the left side of the road. A new branch road merges into the main road ahead on the left, and the road is divided into two lanes by double yellow lines, becoming slightly curved. Trucks, vans, and cars in the right lane pass by the camera one after another, exiting from the right side of the scene. The hillside on the left side of the road is covered with lush trees, and there are utility poles and some trees on the right side of the road.", "events": [ { "event": "At the beginning of the video, the camera focuses on an intersection, with a blue sky and mountains in the distance as the background. The traffic lights at the intersection are green, and there are three cars waiting to pass in the rightmost lane of the opposite intersection. 
Some buildings and billboards can be seen on both the left and right sides of the intersection, with Japanese text and arrows on the billboards.", "visual_elements": [ { "content": "The camera focuses on an intersection.", "type": "camera", "weight": 3 }, { "content": "The background is a blue sky and mountains in the distance.", "type": "scene", "weight": 2 }, { "content": "The traffic lights at the intersection are green.", "type": "attribute", "weight": 2 }, { "content": "There are three cars waiting to pass in the rightmost lane of the opposite intersection.", "type": "attribute", "weight": 2 }, { "content": "Some buildings and billboards can be seen on both the left and right sides of the intersection.", "type": "attribute", "weight": 2 }, { "content": "The billboard has Japanese text and arrows on it.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera begins to pan forward, and a black sedan drives straight from the opposite intersection, exiting the scene from the right side. A silver car behind the black sedan turns left with its left turn signal on and exits the scene. The camera slowly pans to the right, and the silver van behind the silver car drives straight, exiting the scene from the right side. Subsequently, the camera accelerates and pans to the right, and a broad straight road appears in the scene. Many vehicles are queuing to pass in the rightmost lane.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "A black sedan drives straight from the opposing intersection and exits the scene from the right.", "type": "action", "weight": 3 }, { "content": "A silver car behind the black sedan turns left with its left turn signal on and exits the scene.", "type": "action", "weight": 3 }, { "content": "The camera slowly pans to the right.", "type": "camera", "weight": 3 }, { "content": "The silver van behind the silver car drives straight, exiting the scene from the right side.", "type": "action", "weight": 3 }, { "content": "Subsequently, the camera accelerates and pans to the right.", "type": "camera", "weight": 3 }, { "content": "A broad straight road appears in the scene.", "type": "attribute", "weight": 2 }, { "content": "Many vehicles are queuing to pass in the rightmost lane.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera pans forward, and a silver car advances ahead. There are some shops, restaurants, and road signs on the left side of the road. A new branch road merges into the main road ahead on the left, and the road is divided into two lanes by double yellow lines, becoming slightly curved. Trucks, vans, and cars in the right lane pass by the camera one after another, exiting from the right side of the scene. 
The hillside on the left side of the road is covered with lush trees, and there are utility poles and some trees on the right side of the road.", "visual_elements": [ { "content": "The camera pans forward.", "type": "camera", "weight": 3 }, { "content": "A silver car advances ahead.", "type": "scene", "weight": 2 }, { "content": "There are some shops, restaurants, and road signs on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "A new branch road merges into the main road ahead on the left.", "type": "attribute", "weight": 2 }, { "content": "The road is divided into two lanes by double yellow lines.", "type": "attribute", "weight": 2 }, { "content": "The road becomes slightly curved.", "type": "attribute", "weight": 2 }, { "content": "Trucks, vans, and cars in the right lane pass by the camera one after another, exiting from the right side of the scene.", "type": "action", "weight": 3 }, { "content": "The hillside on the left side of the road is covered with lush trees, and there are utility poles and some trees on the right side of the road.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 22 }, { "index": "TUNA_0995", "video_path": "CoVLA/2022-12-15--12-07-19--93_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a two-way four-lane urban road, with the perspective located behind the vehicles. Ahead is a large white truck. There is a Japanese flag on the back door of the truck. Neatly arranged trees line both sides of the road, with leaves showing the golden yellow of autumn. There is a green wall behind the shrubs on the left side of the road, and the sky is clear and sunny.\nThe camera begins to pan forward, and the vehicles in the lane move ahead. There are some shops and buildings on the right side of the road. There are some pedestrians on the sidewalk on the right side of the road. A crossroads appears ahead, and the traffic lights change from green to yellow and finally to red. The truck ahead first slows down to let a car make a right turn and then accelerates to pass through the traffic light, which turns green.\nThe camera stops panning. At this time, a white van is stopped on the zebra crossing at the intersection on the left side of the road, waiting. A bus at the opposite intersection turns right with its right turn signal on, and the van stopped on the zebra crossing moves forward a short distance and then stops with its brake lights on. Subsequently, people riding bicycles, pushing bicycles, and pedestrians start moving on the zebra crossing. There is a gas station on the left side of the intersection with the \"ENEOS\" logo.", "events": [ { "event": "At the beginning of the video, the camera focuses on a two-way four-lane urban road, with the perspective located behind the vehicles. Ahead is a large white truck. There is a Japanese flag on the back door of the truck. Neatly arranged trees line both sides of the road, with leaves showing the golden yellow of autumn. 
There is a green wall behind the shrubs on the left side of the road, and the sky is clear and sunny.", "visual_elements": [ { "content": "The camera focuses on a two-way four-lane urban road.", "type": "camera", "weight": 3 }, { "content": "The perspective is from behind the vehicles.", "type": "camera", "weight": 3 }, { "content": "Ahead is a large white truck.", "type": "attribute", "weight": 2 }, { "content": "There is a Japanese flag on the back door of the truck.", "type": "attribute", "weight": 2 }, { "content": "Neatly arranged trees line both sides of the road, with leaves showing the golden yellow of autumn.", "type": "attribute", "weight": 2 }, { "content": "There is a green wall behind the shrubs on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "The sky is clear and sunny.", "type": "scene", "weight": 2 } ] }, { "event": "The camera begins to pan forward, and the vehicles in the lane move ahead. There are some shops and buildings on the right side of the road. There are some pedestrians on the sidewalk on the right side of the road. A crossroads appears ahead, and the traffic lights change from green to yellow and finally to red. The truck ahead first slows down to let a car make a right turn and then accelerates to pass through the traffic light, which turns green.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "The vehicles on the lane move ahead.", "type": "action", "weight": 3 }, { "content": "There are some shops and buildings on the right side of the road.", "type": "scene", "weight": 2 }, { "content": "There are some pedestrians on the sidewalk on the right side of the road.", "type": "scene", "weight": 2 }, { "content": "An intersection appears ahead.", "type": "attribute", "weight": 2 }, { "content": "The traffic lights change from green to yellow and finally to red.", "type": "attribute", "weight": 3 }, { "content": "The truck ahead first slows down to let a car make a right turn and then accelerates to pass through the traffic light, which turns green.", "type": "action", "weight": 3 } ] }, { "event": "The camera stops panning. At this time, a white van is stopped on the zebra crossing at the intersection on the left side of the road, waiting. A bus at the opposite intersection turns right with its right turn signal on, and the van stopped on the zebra crossing moves forward a short distance and then stops with its brake lights on. Subsequently, people riding bicycles, pushing bicycles, and pedestrians start moving on the zebra crossing. 
There is a gas station on the left side of the intersection with the \"ENEOS\" logo.", "visual_elements": [ { "content": "The camera stops panning.", "type": "camera", "weight": 3 }, { "content": "At this time, a white van is stopped on the zebra crossing at the intersection on the left side of the road, waiting.", "type": "attribute", "weight": 3 }, { "content": "A bus at the opposite intersection turns right with its right turn signal on.", "type": "action", "weight": 3 }, { "content": "Meanwhile, the van stopped on the zebra crossing moves forward a short distance and then stops with its brake lights on.", "type": "action", "weight": 3 }, { "content": "Subsequently, people riding bicycles, pushing bicycles, and pedestrians start moving on the zebra crossing.", "type": "action", "weight": 3 }, { "content": "There is a gas station on the left side of the intersection with the \"ENEOS\" logo.", "type": "scene", "weight": 2 } ] } ], "n_events": 3, "n_elements": 20 }, { "index": "TUNA_0996", "video_path": "CoVLA/2022-12-19--12-54-00--106_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a two-way road with dense trees and shrubbery on both sides of the highway. A white truck moves ahead, with some green signs on the back of the truck. There is a white car in the right lane. On the empty land on the left side of the road, trucks and vans are parked. The weather is sunny and bright.\nThe camera begins to pan forward. The parked vehicles on the left leave the scene, and a building appears on the left side of the road. The sign in front of the building reads the name of the company, and the garage on the first floor of the building is full of cars. The right side of the road is still lush with trees and shrubbery.\nThe camera continues panning forward. The road ahead enters a downhill section. Buildings appear on both sides of the road. The building on the left is a beautiful house. There is a path in front of the house, and a car is waiting to turn on the small road. The wall of the building on the right side has the name and logo of the company written on it. The truck ahead is driving on the downhill section, and the house on the left disappears from the scene. Some trees and grass grow on the hillside on the left side of the road. There is a huge billboard on the right side of the road indicating the remaining distance to the destinations listed on it.\nThe camera continues panning forward. The road ahead becomes flat. The left side of the highway is lined with dense trees and shrubbery, and an area on the right is enclosed by turf. Behind the turf on the right side of the road is a row of trees, and behind the trees are buildings. Vehicles on the opposing lane consistently pass through and exit from the right side of the scene. There is a red billboard on the right side of the road ahead. In the background, the hillside and the trees on it are blurred by the sunlight.", "events": [ { "event": "At the beginning of the video, the camera focuses on a two-way road with dense trees and shrubbery on both sides of the highway. A white truck moves ahead, with some green signs on the back of the truck. There is a white car in the right lane. On the empty land on the left side of the road, trucks and vans are parked. 
The weather is sunny and bright.", "visual_elements": [ { "content": "The camera focuses on a two-way road.", "type": "camera", "weight": 3 }, { "content": "Dense trees and shrubbery line both sides of the highway.", "type": "attribute", "weight": 2 }, { "content": "A white truck moves ahead.", "type": "attribute", "weight": 2 }, { "content": "There are some green signs on the back of the truck.", "type": "attribute", "weight": 2 }, { "content": "There is a white car in the right lane.", "type": "attribute", "weight": 2 }, { "content": "On the empty land on the left side of the road, trucks and vans are parked.", "type": "attribute", "weight": 2 }, { "content": "The weather is sunny and bright.", "type": "scene", "weight": 2 } ] }, { "event": "The camera begins to pan forward. The parked vehicles on the left leave the scene, and a building appears on the left side of the road. The sign in front of the building reads the name of the company, and the garage on the first floor of the building is full of cars. The right side of the road is still lush with trees and shrubbery.", "visual_elements": [ { "content": "The camera begins to pan forward.", "type": "camera", "weight": 3 }, { "content": "The parked vehicles on the left leave the scene.", "type": "attribute", "weight": 2 }, { "content": "A building appears on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "The sign in front of the building reads the name of the company.", "type": "attribute", "weight": 2 }, { "content": "The garage on the first floor of the building is full of cars.", "type": "attribute", "weight": 2 }, { "content": "The right side of the road is still lush with trees and shrubbery.", "type": "scene", "weight": 2 } ] }, { "event": "The camera continues panning forward. The road ahead enters a downhill section. Buildings appear on both sides of the road. The building on the left is a beautiful house. There is a path in front of the house, and a car is waiting to turn on the small road. The wall of the building on the right side has the name and logo of the company written on it. The truck ahead is driving on the downhill section, and the house on the left disappears from the scene. Some trees and grass grow on the hillside on the left side of the road. 
There is a huge billboard on the right side of the road indicating the remaining distance to the destinations listed on it.", "visual_elements": [ { "content": "The camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "The road ahead enters a downhill section.", "type": "attribute", "weight": 3 }, { "content": "The building on the left side of the road is a beautiful house.", "type": "attribute", "weight": 2 }, { "content": "There is a path in front of the house.", "type": "attribute", "weight": 1 }, { "content": "A car on the path is waiting to turn.", "type": "attribute", "weight": 2 }, { "content": "The wall of the building on the right side has the name and logo of the company written on it.", "type": "attribute", "weight": 2 }, { "content": "The truck ahead is driving on the downhill section.", "type": "action", "weight": 3 }, { "content": "The house on the left disappears from the scene.", "type": "attribute", "weight": 2 }, { "content": "Some trees and grass grow on the hillside on the left side of the road.", "type": "scene", "weight": 2 }, { "content": "There is a huge billboard on the right side of the road indicating the remaining distance to the destinations listed on it.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues panning forward. The road ahead becomes flat. The left side of the highway is lined with dense trees and shrubbery, and an area on the right is enclosed by turf. Behind the turf on the right side of the road is a row of trees, and behind the trees are buildings. Vehicles on the opposing lane consistently pass through and exit from the right side of the scene. There is a red billboard on the right side of the road ahead. In the background, the hillside and the trees on it are blurred by the sunlight.", "visual_elements": [ { "content": "The camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "The road ahead becomes flat.", "type": "attribute", "weight": 3 }, { "content": "The left side of the highway is lined with dense trees and shrubbery.", "type": "attribute", "weight": 2 }, { "content": "An area on the right is enclosed by turf.", "type": "attribute", "weight": 2 }, { "content": "Behind the turf on the right side of the road is a row of trees, and behind the trees are buildings.", "type": "attribute", "weight": 2 }, { "content": "Vehicles on the opposing lane consistently pass through and exit from the right side of the scene.", "type": "action", "weight": 2 }, { "content": "There is a red billboard on the right side of the road ahead.", "type": "attribute", "weight": 2 }, { "content": "In the background, the hillside and the trees on it are blurred by the sunlight.", "type": "scene", "weight": 2 } ] } ], "n_events": 4, "n_elements": 31 }, { "index": "TUNA_0997", "video_path": "CoVLA/2022-12-19--15-32-57--6_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "The video begins with the camera focusing on a two-way road, with the perspective shot from inside a car facing forward. In front of the camera is a silver car with its license plate blurred. There are utility poles and wires on both sides of the road. On the left, a small white delivery truck is parked by the roadside with an orange roadblock beside it. Two white signs hang by the roadside ahead of the truck. 
Half of the gate of a factory on the right side of the road is open.\nThe camera starts panning forward as the silver car drives ahead. A parking lot appears on the right side of the road, with various types of cars parked inside, even including excavators. The parking lot is separated by nets and fences. On the left side of the road is a shrubbery, with buildings behind it. Vehicles in the right lane continuously pass through the scene and exit from the right side of the scene.\nThe camera continues panning forward, and the left side of the road is fenced off. Vehicles are parked in the open space behind the fence, and a factory building stands behind the space. A construction site under development appears on the right side of the road, separated from the road by earth piles and some wooden barriers.\nThe camera continues panning forward, and the road ahead turns into an uphill section. Open spaces and buildings are intermittently distributed on both sides of the road. A stretch of white guardrail also appears on the left side of the road, with a high-mounted billboard in front of it.", "events": [ { "event": "The video begins with the camera focusing on a two-way road, with the perspective shot from inside a car facing forward. In front of the camera is a silver car with its license plate blurred. There are utility poles and wires on both sides of the road. On the left, a small white delivery truck is parked by the roadside with an orange roadblock beside it. Two white signs hang by the roadside ahead of the truck. Half of the gate of a factory on the right side of the road is open.", "visual_elements": [ { "content": "The camera focuses on a two-way road.", "type": "camera", "weight": 3 }, { "content": "The perspective is shot from inside the car facing forward.", "type": "camera", "weight": 3 }, { "content": "In front of the lens is a silver car.", "type": "attribute", "weight": 2 }, { "content": "The license plate has been blurred out.", "type": "attribute", "weight": 2 }, { "content": "There are utility poles and wires on both sides of the road.", "type": "scene", "weight": 2 }, { "content": "On the left, a small white delivery truck is parked by the roadside.", "type": "attribute", "weight": 2 }, { "content": "Next to the front of the truck, there is an orange roadblock.", "type": "attribute", "weight": 2 }, { "content": "Two white signs hang by the roadside ahead of the truck.", "type": "attribute", "weight": 2 }, { "content": "Half of the gate of a factory on the right side of the road is open.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera starts panning forward as the silver car drives ahead. A parking lot appears on the right side of the road, with various types of cars parked inside, even including excavators. The parking lot is separated by nets and fences. On the left side of the road is a shrubbery, with buildings behind it. 
Vehicles in the right lane continuously pass through the scene and exit from the right side of the scene.", "visual_elements": [ { "content": "The camera starts panning forward.", "type": "camera", "weight": 3 }, { "content": "The silver car drives ahead.", "type": "action", "weight": 3 }, { "content": "A parking lot appears on the right side of the road.", "type": "attribute", "weight": 2 }, { "content": "The parking lot is filled with various types of cars.", "type": "attribute", "weight": 2 }, { "content": "The parking lot is separated by nets and fences.", "type": "scene", "weight": 2 }, { "content": "On the left side of the road is a shrubbery, with buildings behind it.", "type": "scene", "weight": 2 }, { "content": "Vehicles in the right lane continuously pass through the scene and exit from the right side of the scene.", "type": "action", "weight": 3 } ] }, { "event": "The camera continues panning forward, and the left side of the road is fenced off. Vehicles are parked in the open space behind the fence, and a factory building stands behind the space. A construction site under development appears on the right side of the road, separated from the road by earth piles and some wooden barriers.", "visual_elements": [ { "content": "The camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "The left side of the road is fenced off.", "type": "attribute", "weight": 2 }, { "content": "Vehicles are parked in the open space behind the fence, with a factory building behind the space.", "type": "attribute", "weight": 2 }, { "content": "A construction site under development appears on the right side of the road.", "type": "attribute", "weight": 2 }, { "content": "Earth piles and some wooden barriers separate the construction area from the road.", "type": "attribute", "weight": 1 } ] }, { "event": "The camera continues panning forward, and the road ahead turns into an uphill section. Open spaces and buildings are intermittently distributed on both sides of the road. A stretch of white guardrail also appears on the left side of the road, with a high-mounted billboard in front of it.", "visual_elements": [ { "content": "The camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "The road ahead turns into an uphill section.", "type": "attribute", "weight": 2 }, { "content": "Open spaces and buildings are intermittently distributed on both sides of the road.", "type": "attribute", "weight": 2 }, { "content": "A stretch of white guardrail also appears on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "In front of the guardrail, a high-mounted billboard hangs.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 26 }, { "index": "TUNA_0998", "video_path": "CoVLA/2022-12-20--12-44-31--44_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a two-way road with guardrails and utility poles on both sides. There is a sky-blue building on the left, with a dark gray roof. On the left, there is also a white building topped with a green spire. A black car is ahead, and a blue road sign hangs above the road. The road ahead curves slightly to the right.\nThe camera starts panning forward, with the black car driving ahead, and the scenery on both sides of the road gradually changes. 
Some shrubbery and a yellow traffic sign with a black cross symbol appear on the left. A row of yellow directional signs is installed on the left guardrail. In the distance, a pedestrian overpass spans the road.\nThe camera continues panning forward, and the text on the overpass is clearly visible. The traffic lights below the overpass change from green to yellow and finally to red, while the camera gradually stops panning. The black car accelerates through the intersection when the traffic light turns yellow. A blue truck with its right turn signal on enters from the left side of the screen, slowly turns right, and exits from the right side. Several cars following the truck drive straight through the intersection, entering from the left and exiting on the right. Vehicles at the opposite intersection drive slowly to the intersection and wait to pass. The right turn signal of the vehicles in the left lane on the opposite side is flashing.", "events": [ { "event": "At the beginning of the video, the camera focuses on a two-way road with guardrails and utility poles on both sides. There is a sky-blue building on the left, with a dark gray roof. On the left, there is also a white building topped with a green spire. A black car is ahead, and a blue road sign hangs above the road. The road ahead curves slightly to the right.", "visual_elements": [ { "content": "The camera focuses on a two-way road.", "type": "camera", "weight": 2 }, { "content": "The road is lined with guardrails and utility poles.", "type": "attribute", "weight": 2 }, { "content": "There is a sky-blue building on the left, with a dark gray roof.", "type": "attribute", "weight": 2 }, { "content": "There is also a white building on the left, topped with a green spire.", "type": "attribute", "weight": 2 }, { "content": "A black car is ahead.", "type": "attribute", "weight": 2 }, { "content": "A blue road sign hangs above the road.", "type": "attribute", "weight": 2 }, { "content": "The road ahead curves slightly to the right.", "type": "attribute", "weight": 3 } ] }, { "event": "The camera starts panning forward, with the black car driving ahead, and the scenery on both sides of the road gradually changes. Some shrubbery and a yellow traffic sign with a black cross symbol appear on the left. A row of yellow directional signs is installed on the left guardrail. In the distance, a pedestrian overpass spans the road.", "visual_elements": [ { "content": "The camera starts panning forward.", "type": "camera", "weight": 3 }, { "content": "The black car drives ahead.", "type": "action", "weight": 3 }, { "content": "Some shrubbery and a yellow traffic sign appear on the left side of the road.", "type": "attribute", "weight": 2 }, { "content": "The traffic sign features a black cross symbol.", "type": "attribute", "weight": 2 }, { "content": "A row of yellow directional signs is installed on the left guardrail.", "type": "attribute", "weight": 2 }, { "content": "In the distance, a pedestrian overpass spans the road.", "type": "attribute", "weight": 2 } ] }, { "event": "The camera continues panning forward, and the text on the overpass is clearly visible. The traffic lights below the overpass change from green to yellow and finally to red, while the camera gradually stops panning. The black car accelerates through the intersection when the traffic light turns yellow. A blue truck with its right turn signal on enters from the left side of the screen, slowly turns right, and exits from the right side. 
Several cars following the truck drive straight through the intersection, entering from the left and exiting on the right. Vehicles at the opposite intersection drive slowly to the intersection and wait to pass. The right turn signal of the vehicles in the left lane on the opposite side is flashing.", "visual_elements": [ { "content": "The camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "The text on the overpass is clearly visible.", "type": "attribute", "weight": 2 }, { "content": "The traffic lights below the overpass change from green to yellow and finally to red.", "type": "attribute", "weight": 3 }, { "content": "The camera gradually stops panning.", "type": "camera", "weight": 3 }, { "content": "The black car accelerates through the intersection when the traffic light turns yellow.", "type": "action", "weight": 3 }, { "content": "A blue truck with its right turn signal on enters from the left side of the scene.", "type": "action", "weight": 3 }, { "content": "The truck turns right slowly and exits from the right side.", "type": "action", "weight": 3 }, { "content": "Several cars following the truck drive straight through the intersection, entering from the left and exiting on the right.", "type": "action", "weight": 3 }, { "content": "Vehicles at the opposite intersection drive slowly to the intersection and wait to pass.", "type": "action", "weight": 3 }, { "content": "The right turn signal of the vehicles in the left lane on the opposite side is flashing.", "type": "attribute", "weight": 2 } ] } ], "n_events": 3, "n_elements": 23 }, { "index": "TUNA_0999", "video_path": "CoVLA/2022-12-20--14-57-51--47_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the beginning of the video, the camera focuses on a rural road with a clear sky and bright sunshine. Along the road are some low-rise houses and farmland, while utility poles and wires stretch along the road.\nThe camera starts panning forward. The road ahead descends, and vehicles in the opposite lane pass by the camera successively, exiting from the right side of the screen. The building wall on the left side of the road is composed of white fences, while the right side is a lush green vegetable field.\nAs the camera continues panning forward, the road ahead flattens out. The vegetation behind the fence on the left side of the road is neatly trimmed and beautiful, while the right side is dense with trees and shrubbery. The wires on the utility poles above the road crisscross.\nAt the upcoming intersection, the traffic lights are red. The camera slows down and gradually stops panning. A car on the left side of the scene turns left into the straight-ahead lane in front of the camera. There is a constant flow of traffic on the crosswise lanes at the intersection, and two cars wait to pass on the right lane of the opposite intersection. A white signboard is posted on the utility pole on the left side of the scene.", "events": [ { "event": "At the beginning of the video, the camera focuses on a rural road with a clear sky and bright sunshine. 
Along the road are some low-rise houses and farmland, while utility poles and wires stretch along the road.", "visual_elements": [ { "content": "The camera focuses on a rural road.", "type": "camera", "weight": 3 }, { "content": "The sky is clear and sunny.", "type": "scene", "weight": 2 }, { "content": "Along the road are some low-rise houses and farmland.", "type": "scene", "weight": 2 }, { "content": "Utility poles and wires stretch along the road.", "type": "scene", "weight": 2 } ] }, { "event": "The camera starts panning forward. The road ahead descends, and vehicles in the opposite lane pass by the camera successively, exiting from the right side of the screen. The building wall on the left side of the road is composed of white fences, while the right side is a lush green vegetable field.", "visual_elements": [ { "content": "The camera starts panning forward.", "type": "camera", "weight": 3 }, { "content": "The road ahead descends.", "type": "attribute", "weight": 2 }, { "content": "Vehicles in the opposite lane pass by the camera successively, exiting from the right side of the screen.", "type": "action", "weight": 3 }, { "content": "The building wall on the left side of the road is composed of white fences.", "type": "attribute", "weight": 2 }, { "content": "The right side is a lush green vegetable field.", "type": "attribute", "weight": 2 } ] }, { "event": "As the camera continues panning forward, the road ahead flattens out. The vegetation behind the fence on the left side of the road is neatly trimmed and beautiful, while the right side is dense with trees and shrubbery. The wires on the utility poles above the road crisscross.", "visual_elements": [ { "content": "The camera continues panning forward.", "type": "camera", "weight": 3 }, { "content": "The road ahead flattens out.", "type": "attribute", "weight": 2 }, { "content": "The vegetation behind the fence on the left side of the road is neatly trimmed and beautiful.", "type": "attribute", "weight": 2 }, { "content": "The right side is dense with trees and shrubbery.", "type": "scene", "weight": 2 }, { "content": "The wires on the utility poles above the road crisscross.", "type": "scene", "weight": 2 } ] }, { "event": "At the upcoming intersection, the traffic lights are red. The camera slows down and gradually stops panning. A car on the left side of the scene turns left into the straight-ahead lane in front of the camera. There is a constant flow of traffic on the crosswise lanes at the intersection, and two cars wait to pass on the right lane of the opposite intersection. 
A white signboard is posted on the utility pole on the left side of the scene.", "visual_elements": [ { "content": "At the upcoming intersection, the traffic lights are red.", "type": "attribute", "weight": 2 }, { "content": "The camera slows down and gradually stops panning.", "type": "camera", "weight": 3 }, { "content": "A car on the left side of the scene turns left into the straight-ahead lane in front of the camera.", "type": "action", "weight": 3 }, { "content": "There is a constant flow of traffic on the crosswise lanes at the intersection.", "type": "action", "weight": 3 }, { "content": "Two cars wait to pass on the right lane of the opposite intersection.", "type": "attribute", "weight": 2 }, { "content": "A white signboard is posted on the utility pole on the left side of the scene.", "type": "attribute", "weight": 2 } ] } ], "n_events": 4, "n_elements": 20 }, { "index": "TUNA_1000", "video_path": "CoVLA/2022-12-23--13-57-07--129_first.mp4", "video_source": "CoVLA", "visual_characteristic": "Low-Dynamic,Multi-Scene", "domain": "Driving", "duration": 30.0, "resolution": { "width": 1936, "height": 1216 }, "caption": "At the start of another segment of the video, the camera focuses on a highway, shot from the rear. There are two large trucks ahead, with the left truck slightly lagging behind the right one. The left truck's body is adorned with large black text, and other vehicles can be seen in front of it. The rear of the right truck is marked with the word \"YAMAICHI\". The right side of the road is lined with white guardrails, and tall noise barriers are installed next to the road on the right side of the guardrails. Blue and green road signs hang above the road.\nThe two trucks ahead start moving slowly. A white and black police car enters the scene from behind the left truck. The left truck stops moving, while the right truck continues forward. The camera starts panning forward slowly. The police car moves to the right front and aligns with the left truck, then passes through the gap between the two trucks and continues forward along the middle gap between the two lanes, eventually exiting the scene. The red warning light on top of the police car keeps flashing.\nAfter stopping momentarily, the camera continues panning forward slowly. The vehicles in both lanes are moving very slowly. There is less traffic in the lane on the right side of the white guardrails, and vehicles are moving smoothly. The tops of tall buildings appear in the distance.", "events": [ { "event": "At the start of another segment of the video, the camera focuses on a highway, shot from the rear. There are two large trucks ahead, with the left truck slightly lagging behind the right one. The left truck's body is adorned with large black text, and other vehicles can be seen in front of it. The rear of the right truck is marked with the word \"YAMAICHI\". The right side of the road is lined with white guardrails, and tall noise barriers are installed next to the road on the right side of the guardrails. 
Blue and green road signs hang above the road.", "visual_elements": [ { "content": "The camera focuses on a highway.", "type": "camera", "weight": 3 }, { "content": "The angle of view is shot from the rear.", "type": "camera", "weight": 3 }, { "content": "There are two large trucks ahead.", "type": "attribute", "weight": 3 }, { "content": "The left truck slightly lags behind the right one.", "type": "attribute", "weight": 2 }, { "content": "The left truck's body is adorned with large black text.", "type": "attribute", "weight": 2 }, { "content": "Other vehicles can be seen in front of the left truck.", "type": "attribute", "weight": 2 }, { "content": "The rear of the right truck is marked with the word \"YAMAICHI\".", "type": "attribute", "weight": 2 }, { "content": "The right side of the road is lined with white guardrails.", "type": "attribute", "weight": 2 }, { "content": "Tall noise barriers are installed next to the road on the right side of the guardrails.", "type": "attribute", "weight": 2 }, { "content": "Blue and green road signs hang above the road.", "type": "attribute", "weight": 2 } ] }, { "event": "The two trucks ahead start moving slowly. A white and black police car enters the scene from behind the left truck. The left truck stops moving, while the right truck continues forward. The camera starts panning forward slowly. The police car moves to the right front and aligns with the left truck, then passes through the gap between the two trucks and continues forward along the middle gap between the two lanes, eventually exiting the scene. The red warning light on top of the police car keeps flashing.", "visual_elements": [ { "content": "The two trucks ahead start moving slowly.", "type": "action", "weight": 3 }, { "content": "A white and black police car enters the scene from behind the left truck.", "type": "action", "weight": 3 }, { "content": "The left truck stops moving, while the right truck continues forward.", "type": "action", "weight": 3 }, { "content": "The camera starts panning forward slowly.", "type": "camera", "weight": 3 }, { "content": "The police car moves to the right front and aligns with the left truck, then passes through the gap between the two trucks.", "type": "action", "weight": 3 }, { "content": "The police car continues forward along the middle gap between the two lanes and eventually exits the scene.", "type": "action", "weight": 3 }, { "content": "The red warning light on top of the police car keeps flashing.", "type": "attribute", "weight": 2 } ] }, { "event": "After stopping momentarily, the camera continues panning forward slowly. The vehicles in both lanes are moving very slowly. There is less traffic in the lane on the right side of the white guardrails, and vehicles are moving smoothly. The tops of tall buildings appear in the distance.", "visual_elements": [ { "content": "After stopping momentarily, the camera continues panning forward slowly.", "type": "camera", "weight": 3 }, { "content": "The vehicles in both lanes are moving very slowly.", "type": "attribute", "weight": 2 }, { "content": "There is less traffic in the lane on the right side of the white guardrails.", "type": "scene", "weight": 2 }, { "content": "Vehicles in that lane on the right side of the white guardrails are moving smoothly.", "type": "scene", "weight": 2 }, { "content": "The tops of tall buildings appear in the distance.", "type": "scene", "weight": 1 } ] } ], "n_events": 3, "n_elements": 22 } ]