many fixes to color, generally good output now

This commit is contained in:
2025-06-21 09:19:54 -04:00
parent 94408b1316
commit d4036cbf78
2 changed files with 368 additions and 26 deletions

View File

@ -11,19 +11,34 @@ from functools import partial
# This dictionary maps the desired abbreviation to the full command-line flag.
# This makes it easy to add or remove flags in the future.
# This dictionary maps the desired abbreviation to the full command-line flag.
# Arguments are organized into "oneof" groups to avoid invalid combinations.
ARGS_MAP = {
    # Standalone boolean flags: each may independently be present or absent
    # when argument combinations are generated.
    # 'fd': '--force-d65',
    # 'pnc': '--perform-negative-correction',
    # 'pwb': '--perform-white-balance',        # now always passed at job-build time
    # 'pec': '--perform-exposure-correction',  # now always passed at job-build time
    # 'rae': '--raw-auto-exposure',
    # 'sg': '--simulate-grain',  # moved to ONEOF_GROUPS: exclusive with 'mg'
    # 'mg': '--mono-grain'       # moved to ONEOF_GROUPS: exclusive with 'sg'
}
# Groups of mutually exclusive arguments (only one from each group should be used)
# Mutually exclusive flag groups: when combinations are generated, at most
# one abbreviation is drawn from each group (or the group is skipped).
ONEOF_GROUPS = [
    # Scanner simulation — pick a single scanner type, or none.
    # Values are [flag, value] pairs, extended onto the command line.
    dict(
        smf=["--scanner-type", "frontier"],
        smh=["--scanner-type", "hasselblad"],
        smn=["--scanner-type", "noritsu"],
    ),
    # Grain simulation — colour grain and mono grain are exclusive.
    # Values are simple flags, appended as-is.
    dict(
        sg="--simulate-grain",
        mg="--mono-grain",
    ),
]
# --- Worker Function for Multiprocessing ---
def run_filmcolor_command(job_info, filmcolor_path):
def run_filmcolor_command(job_info, filmcolor_path, dry_run=False):
"""
Executes a single filmcolor command.
This function is designed to be called by a multiprocessing Pool.
@ -36,10 +51,18 @@ def run_filmcolor_command(job_info, filmcolor_path):
datasheet,
output_file
]
command.extend(flags)
# Add all flags to the command
for flag in flags:
if isinstance(flag, list):
command.extend(flag) # For arguments with values like ['--scanner-type', 'frontier']
else:
command.append(flag) # For simple flags like '--simulate-grain'
command_str = " ".join(command)
print(f"🚀 Starting job: {os.path.basename(output_file)}")
if dry_run:
return f"🔍 DRY RUN: {command_str} (not executed)"
try:
# Using subprocess.run to execute the command
@ -94,6 +117,16 @@ def main():
default=3,
help="Number of parallel jobs to run. (Default: 3)"
)
parser.add_argument(
"--dry-run",
action='store_true',
help="If set, will only print the commands without executing them."
)
parser.add_argument(
"--refresh",
action='store_true',
help="If set, will reprocess existing output files. Otherwise, skips files that already exist."
)
args = parser.parse_args()
# 1. Find all input RAW files
@ -126,15 +159,44 @@ def main():
print(f" Found {len(datasheet_files)} datasheet files.")
# 3. Generate all argument combinations
arg_abbreviations = list(ARGS_MAP.keys())
# Get regular standalone arguments
standalone_args = list(ARGS_MAP.keys())
# Generate all possible combinations of regular args
standalone_arg_combos = []
for i in range(len(standalone_args) + 1):
for combo in itertools.combinations(standalone_args, i):
standalone_arg_combos.append(sorted(list(combo)))
# Create all possible combinations with oneof groups
all_arg_combos = []
# Loop from 0 to len(abbreviations) to get combinations of all lengths
for i in range(len(arg_abbreviations) + 1):
for combo in itertools.combinations(arg_abbreviations, i):
all_arg_combos.append(sorted(list(combo))) # Sort for consistent naming
# For each oneof group, we need to include either one option or none
oneof_options = []
for group in ONEOF_GROUPS:
# Add an empty list to represent using no option from this group
group_options = [None]
# Add each option from the group
group_options.extend(group.keys())
oneof_options.append(group_options)
# Generate all combinations of oneof options
for oneof_combo in itertools.product(*oneof_options):
# Filter out None values
oneof_combo = [x for x in oneof_combo if x is not None]
# Combine with standalone args
for standalone_combo in standalone_arg_combos:
# Combine the two lists and sort for consistent naming
combined_combo = sorted(standalone_combo + oneof_combo)
all_arg_combos.append(combined_combo)
# Remove any duplicates
all_arg_combos = [list(x) for x in set(map(tuple, all_arg_combos))]
# 4. Create the full list of jobs to run
jobs_to_run = []
skipped_jobs = 0
for raw_file_path in raw_files:
input_dir = os.path.dirname(raw_file_path)
input_filename = os.path.basename(raw_file_path)
@ -153,14 +215,36 @@ def main():
output_path = os.path.join(input_dir, output_name)
# Skip if file exists and --refresh is not set
if os.path.exists(output_path) and not args.refresh:
skipped_jobs += 1
continue
# Get the full flags from the abbreviations
flags = [ARGS_MAP[abbr] for abbr in arg_combo_abbrs] + ['--perform-negative-correction'] # always include this flag
flags = []
for abbr in arg_combo_abbrs:
# Check if this is from a oneof group
is_oneof = False
for group in ONEOF_GROUPS:
if abbr in group:
flags.append(group[abbr])
is_oneof = True
break
# If not from a oneof group, use the regular ARGS_MAP
if not is_oneof and abbr in ARGS_MAP:
flags.append(ARGS_MAP[abbr])
# Add required flags
flags.extend(['--perform-negative-correction', "--perform-white-balance", '--perform-exposure-correction'])
# Add the complete job description to our list
jobs_to_run.append((raw_file_path, datasheet_path, output_path, flags))
total_jobs = len(jobs_to_run)
print(f"\n✨ Generated {total_jobs} total jobs to run.")
if skipped_jobs > 0:
print(f"⏭️ Skipped {skipped_jobs} existing output files. Use --refresh to reprocess them.")
if total_jobs == 0:
print("Nothing to do. Exiting.")
sys.exit(0)
@ -175,11 +259,10 @@ def main():
print("\nAborted by user.")
sys.exit(0)
# 5. Run the jobs in a multiprocessing pool
print("\n--- Starting Testbench ---\n")
# `partial` is used to "pre-fill" the filmcolor_path argument of our worker function
worker_func = partial(run_filmcolor_command, filmcolor_path=args.filmcolor_path)
worker_func = partial(run_filmcolor_command, filmcolor_path=args.filmcolor_path, dry_run=args.dry_run)
with Pool(processes=args.jobs) as pool:
# imap_unordered is great for this: it yields results as they complete,