File size: 9,279 Bytes
e31e7b4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
import os
import shutil
import torch
import cv2
from pathlib import Path
import logging
from tqdm import tqdm
import concurrent.futures
from threading import Lock
import time

class FileChecker:
    """Scan a directory tree for corrupted ``.pt`` (and optionally ``.mp4``)
    files and move every corrupted file into a parallel tree under
    ``corrupted_dir``, preserving the relative layout.

    Per-file checks run on a ``ThreadPoolExecutor``; all mutations of the
    shared ``stats`` dict are guarded by ``self.lock``.
    """

    def __init__(self, source_dir, corrupted_dir, max_workers=32):
        """
        Args:
            source_dir: root directory scanned recursively for target files.
            corrupted_dir: destination root for corrupted files; the file's
                path relative to ``source_dir`` is recreated under it.
            max_workers: thread-pool size used for the concurrent checks.
        """
        self.source_dir = Path(source_dir)
        self.corrupted_dir = Path(corrupted_dir)
        self.max_workers = max_workers
        # Serializes updates to self.stats from worker threads.
        self.lock = Lock()

        # Aggregate counters plus per-file outcome lists.
        self.stats = {
            'total_pt': 0,
            'total_mp4': 0,
            'corrupted_pt': 0,
            'corrupted_mp4': 0,
            'moved_files': [],
            'failed_moves': []
        }

        self.setup_logging()

    def setup_logging(self):
        """Configure logging to both ``file_check.log`` and the console."""
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s',
            handlers=[
                logging.FileHandler('file_check.log'),
                logging.StreamHandler()
            ]
        )
        self.logger = logging.getLogger(__name__)

    def check_pt_file(self, file_path):
        """Return True if ``file_path`` loads as a non-None torch payload.

        NOTE(review): ``torch.load`` unpickles arbitrary objects — do not
        point this at untrusted files. If the archives are known to hold
        only tensors, consider passing ``weights_only=True``.
        """
        try:
            data = torch.load(file_path, map_location='cpu')
            # A file that unpickles to None is treated as corrupt/empty.
            return data is not None
        except Exception:
            # Any load failure (truncation, bad zip, bad pickle) => corrupt.
            return False

    def check_mp4_file(self, file_path):
        """Return True if the video opens, reports sane metadata, and its
        first few frames decode successfully.

        The capture handle is always released via ``finally`` — the
        original leaked it when ``isOpened()`` was False or an exception
        fired mid-check.
        """
        cap = None
        try:
            cap = cv2.VideoCapture(str(file_path))
            if not cap.isOpened():
                return False

            frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
            fps = cap.get(cv2.CAP_PROP_FPS)
            width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
            height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))

            # Zero/negative metadata almost always means a truncated file.
            if frame_count <= 0 or fps <= 0 or width <= 0 or height <= 0:
                return False

            # Decode a few frames to catch files whose header is intact
            # but whose payload is corrupt.
            for _ in range(min(3, frame_count)):
                ret, frame = cap.read()
                if not ret or frame is None:
                    return False

            return True
        except Exception:
            return False
        finally:
            if cap is not None:
                cap.release()

    def move_corrupted_file(self, file_path, file_type):
        """Move ``file_path`` under ``corrupted_dir`` keeping its relative
        path, and record the outcome in ``stats``.

        Args:
            file_path: Path of the corrupted file (must live under
                ``source_dir``).
            file_type: ``'pt'`` or ``'mp4'`` — selects which counter to bump.

        Returns:
            True if the move succeeded, False otherwise.
        """
        try:
            # Recreate the source-relative directory structure.
            relative_path = file_path.relative_to(self.source_dir)
            new_path = self.corrupted_dir / relative_path
            new_path.parent.mkdir(parents=True, exist_ok=True)

            shutil.move(str(file_path), str(new_path))

            with self.lock:
                self.stats['moved_files'].append(str(file_path))
                if file_type == 'pt':
                    self.stats['corrupted_pt'] += 1
                else:
                    self.stats['corrupted_mp4'] += 1

            self.logger.info(f"已移动损坏文件: {file_path} -> {new_path}")
            return True

        except Exception as e:
            with self.lock:
                self.stats['failed_moves'].append(str(file_path))
            self.logger.error(f"移动文件失败 {file_path}: {e}")
            return False

    def process_pt_file(self, file_path):
        """Check one ``.pt`` file and move it aside if corrupt.

        Returns False only when the move itself failed.
        """
        with self.lock:
            self.stats['total_pt'] += 1

        if not self.check_pt_file(file_path):
            self.logger.warning(f"发现损坏的 .pt 文件: {file_path}")
            return self.move_corrupted_file(file_path, 'pt')
        return True

    def process_mp4_file(self, file_path):
        """Check one ``.mp4`` file and move it aside if corrupt."""
        with self.lock:
            self.stats['total_mp4'] += 1

        if not self.check_mp4_file(file_path):
            self.logger.warning(f"发现损坏的 .mp4 文件: {file_path}")
            return self.move_corrupted_file(file_path, 'mp4')
        return True

    def _run_checks(self, files, worker, desc):
        """Run ``worker`` over ``files`` on a thread pool with a tqdm bar.

        Shared by the .pt and .mp4 passes (the original duplicated this
        loop in commented-out form).
        """
        with concurrent.futures.ThreadPoolExecutor(max_workers=self.max_workers) as executor:
            future_to_file = {executor.submit(worker, file_path): file_path
                              for file_path in files}

            for future in tqdm(concurrent.futures.as_completed(future_to_file),
                               total=len(files), desc=desc):
                file_path = future_to_file[future]
                try:
                    future.result()
                except Exception as e:
                    self.logger.error(f"处理文件 {file_path} 时出错: {e}")

    def process_files(self, check_mp4=False):
        """Scan ``source_dir`` and check all ``.pt`` files concurrently.

        Args:
            check_mp4: when True, additionally scan and check ``.mp4``
                files. Defaults to False, matching the previous behavior
                where the mp4 pass was disabled (commented out).

        Returns:
            The ``stats`` dict with counters and moved/failed file lists.
        """
        # Ensure the destination root exists before any worker moves a file.
        self.corrupted_dir.mkdir(parents=True, exist_ok=True)

        pt_files = list(self.source_dir.rglob('*.pt'))

        self.logger.info(f"找到 {len(pt_files)} 个 .pt 文件")
        self.logger.info(f"使用 {self.max_workers} 个线程进行处理")

        start_time = time.time()

        if pt_files:
            self.logger.info("开始多线程检查 .pt 文件...")
            self._run_checks(pt_files, self.process_pt_file, "检查 .pt 文件")

        if check_mp4:
            mp4_files = list(self.source_dir.rglob('*.mp4'))
            self.logger.info(f"找到 {len(mp4_files)} 个 .mp4 文件")
            if mp4_files:
                self.logger.info("开始多线程检查 .mp4 文件...")
                self._run_checks(mp4_files, self.process_mp4_file, "检查 .mp4 文件")

        end_time = time.time()
        processing_time = end_time - start_time

        self.print_statistics(processing_time)

        return self.stats

    def print_statistics(self, processing_time):
        """Log a summary of the run (counts, timings, throughput)."""
        self.logger.info("=" * 60)
        self.logger.info("检查完成!统计结果:")
        self.logger.info(f"处理时间: {processing_time:.2f} 秒")
        self.logger.info(f"使用线程数: {self.max_workers}")
        self.logger.info(f"总 .pt 文件数: {self.stats['total_pt']}")
        self.logger.info(f"损坏 .pt 文件数: {self.stats['corrupted_pt']}")
        self.logger.info(f"总 .mp4 文件数: {self.stats['total_mp4']}")
        self.logger.info(f"损坏 .mp4 文件数: {self.stats['corrupted_mp4']}")
        self.logger.info(f"成功移动文件数: {len(self.stats['moved_files'])}")
        self.logger.info(f"移动失败文件数: {len(self.stats['failed_moves'])}")

        total_files = self.stats['total_pt'] + self.stats['total_mp4']
        # Guard both operands: the original divided by processing_time
        # unconditionally and could raise ZeroDivisionError on an
        # effectively-instant run.
        if total_files > 0 and processing_time > 0:
            files_per_second = total_files / processing_time
            self.logger.info(f"平均处理速度: {files_per_second:.2f} 文件/秒")

        self.logger.info("=" * 60)

def main():
    """Entry point: scan the configured dataset directory for corrupted
    files and persist the lists of moved / failed-to-move files."""
    # Hard-coded job configuration.
    source_dir = "/mnt/bn/yufan-dev-my/ysh/Ckpts/Lixsp11/0_final_sekai_dataset"
    corrupted_dir = "/mnt/bn/yufan-dev-my/ysh/Datasets/corrupted_files"
    max_workers = 8

    print(f"源目录: {source_dir}")
    print(f"损坏文件将移动到: {corrupted_dir}")
    print(f"并发线程数: {max_workers}")
    print("=" * 50)

    # Build the checker and run the scan.
    stats = FileChecker(source_dir, corrupted_dir, max_workers).process_files()

    # Record moved files for later inspection.
    moved = stats['moved_files']
    if moved:
        with open('moved_files_list.txt', 'w') as f:
            f.writelines(f"{p}\n" for p in moved)
        print(f"已将移动的文件列表保存到 moved_files_list.txt")

    # Record files whose move failed.
    failed = stats['failed_moves']
    if failed:
        with open('failed_moves_list.txt', 'w') as f:
            f.writelines(f"{p}\n" for p in failed)
        print(f"已将移动失败的文件列表保存到 failed_moves_list.txt")

# Run the full check when executed as a script (no side effects on import).
if __name__ == "__main__":
    main()