| | import os |
| | import shutil |
| | import re |
| | from collections import defaultdict |
| |
|
def get_all_source_folders(scrape_dir):
    """Walk *scrape_dir* recursively and index every directory by its base name.

    Returns a ``defaultdict(list)`` mapping ``folder_name -> [full_path, ...]``;
    a name appearing in several places maps to several paths. If *scrape_dir*
    does not exist, an error is printed and the empty mapping is returned.
    """
    folders_by_name = defaultdict(list)

    if not os.path.exists(scrape_dir):
        print(f"Error: Scrape directory '{scrape_dir}' does not exist.")
        return folders_by_name

    # os.walk visits every directory level, so nested duplicates are captured.
    for parent, dirnames, _ in os.walk(scrape_dir):
        for name in dirnames:
            folders_by_name[name].append(os.path.join(parent, name))

    return folders_by_name
| |
|
def get_subfolder_names(folder_path):
    """Return the names of the directories located immediately inside *folder_path*.

    Plain files are ignored; only the entry names (not full paths) are returned.
    """
    names = []
    for entry in os.listdir(folder_path):
        if os.path.isdir(os.path.join(folder_path, entry)):
            names.append(entry)
    return names
| |
|
def process_folders(scrape_dir, dest_root_path, dry_run=True):
    """Match destination folders to scraped source folders and sync files.

    The destination tree is expected to be two levels deep
    (``dest_root_path/<level1>/<folder_name>``). Each ``folder_name`` is looked
    up (by exact name) in the index built from *scrape_dir*:

    - no match   -> reported as ``[UNMATCHED]`` (dry run only), skipped;
    - >1 matches -> reported as ``[CONFLICT]``, always skipped;
    - 1 match    -> in dry run the mapping is printed; otherwise at most one
      timestamped file per hour is copied from each source subfolder (existing
      destination files are never overwritten).

    Args:
        scrape_dir: Root of the scraped source tree to index.
        dest_root_path: Root of the two-level destination tree.
        dry_run: When True (default), only print the mapping; copy nothing.
    """
    source_folders = get_all_source_folders(scrape_dir)
    if not source_folders:
        print("No source folders found.")
        return

    if not os.path.exists(dest_root_path):
        print(f"Error: Destination root '{dest_root_path}' does not exist.")
        return

    print("=" * 60)
    print(f"MODE: {'DRY RUN (Mapping Only)' if dry_run else 'EXECUTION (Copying Files)'}")
    print("=" * 60)

    # Filenames start with a timestamp like "2024-01-02_10-30-00"; group(1)
    # captures date + hour, which is the key used to keep one file per hour.
    file_pattern = re.compile(r'^(\d{4}-\d{2}-\d{2}_\d{2})-\d{2}-\d{2}')

    for level1 in os.listdir(dest_root_path):
        level1_path = os.path.join(dest_root_path, level1)
        if not os.path.isdir(level1_path):
            continue

        for folder_name in os.listdir(level1_path):
            dest_path = os.path.join(level1_path, folder_name)
            if not os.path.isdir(dest_path):
                continue

            src_paths = source_folders.get(folder_name, [])

            if not src_paths:
                # Unmatched folders are only worth reporting when mapping.
                if dry_run:
                    print(f"[UNMATCHED] Dest: '{folder_name}' -> No source found.")
                continue

            if len(src_paths) > 1:
                # Ambiguous name: never guess which source is right.
                print(f"[CONFLICT] Dest: '{folder_name}' -> Matches {len(src_paths)} sources. Skipping.")
                if dry_run:
                    for p in src_paths:
                        print(f" - {p}")
                continue

            src_path = src_paths[0]

            if dry_run:
                print(f"[MAPPED] Dest: '{folder_name}' \n <- Source: '{src_path}'")
                continue

            _copy_hourly_files(src_path, dest_path, folder_name, file_pattern)

    print("\nProcess completed.")


def _copy_hourly_files(src_path, dest_path, folder_name, file_pattern):
    """Copy at most one timestamped file per hour from each source subfolder.

    For every subfolder of *src_path*, files matching *file_pattern* are
    grouped by their date+hour prefix; the alphabetically first file of each
    hour (i.e. the earliest, given the sortable timestamp format) is copied
    into the mirrored subfolder under *dest_path*. Files already present at
    the destination are left untouched. *folder_name* is only used for the
    progress message.
    """
    for subfolder in get_subfolder_names(src_path):
        src_sub_path = os.path.join(src_path, subfolder)
        dest_sub_path = os.path.join(dest_path, subfolder)
        os.makedirs(dest_sub_path, exist_ok=True)

        files = sorted(
            f for f in os.listdir(src_sub_path)
            if os.path.isfile(os.path.join(src_sub_path, f))
        )

        # setdefault keeps the first (earliest) file seen for each hour key.
        selected_files = {}
        for file_name in files:
            match = file_pattern.search(file_name)
            if match:
                selected_files.setdefault(match.group(1), file_name)

        copied_count = 0
        for file_name in selected_files.values():
            src_file = os.path.join(src_sub_path, file_name)
            dest_file = os.path.join(dest_sub_path, file_name)

            if os.path.exists(dest_file):
                continue  # never overwrite an existing destination file

            shutil.copy2(src_file, dest_file)  # copy2 preserves timestamps
            copied_count += 1

        if copied_count > 0:
            print(f"Copied {copied_count} new files into '{folder_name}/{subfolder}'.")
| |
|
if __name__ == "__main__":
    # Hard-coded local paths: the tree to index and the tree to populate.
    scrape_dir = "/media/ngaggion/DATA/Raices/Datasets"
    dest_root = "/media/ngaggion/DATA/Raices/NewData/ArabidopsisDataset"

    # dry_run=False: actually copy files instead of only printing the mapping.
    process_folders(scrape_dir, dest_root, dry_run=False)