v1.1 - Spectral simulation, but very slow

2025-06-07 10:24:21 -04:00
parent d59bd215d8
commit 316bdae3d9
3 changed files with 287 additions and 5 deletions

docs/spectral_primary_decomposition.pdf (BIN, stored with Git LFS) Normal file
Binary file not shown.


@@ -6,6 +6,7 @@
# "Pillow",
# "imageio",
# "rawpy",
# "colour",
# ]
# ///
@@ -29,6 +30,8 @@ from scipy.ndimage import gaussian_filter
import rawpy
import os
import sys
import colour
from colour.colorimetry import SDS_ILLUMINANTS
import json
# --- Configuration ---
@@ -491,7 +494,7 @@ def apply_spatial_effects(
# --- 2. Apply Halation ---
# This simulates light scattering back through the emulsion
-halation_applied = False
+halation_applied = True
blurred_image_halation = np.copy(
image
) # Start with potentially diffusion-blurred image
@@ -535,6 +538,89 @@ def apply_spatial_effects(
return image
def calculate_film_log_exposure(image_linear_srgb, # Assuming input is converted to linear sRGB
spectral_data: list[SpectralSensitivityCurvePoint],
ref_illuminant_spd, # e.g., colour.SDS_ILLUMINANTS['D65']
middle_gray_logE,
EPSILON=1e-10):
"""
Calculates the effective log exposure for each film layer based on spectral sensitivities.
This version correctly maps film layers (RGB) to dye sensitivities (CMY)
and calibrates exposure without distorting color balance.
"""
# --- 1. Define a common spectral shape and align all data ---
common_shape = colour.SpectralShape(380, 780, 5)
common_wavelengths = common_shape.wavelengths
# --- THE FIX IS HERE: Correct mapping of film layers to sensitivities ---
# The 'R' layer of our model corresponds to the Cyan dye ('c') sensitivity curve.
# The 'G' layer of our model corresponds to the Magenta dye ('m') sensitivity curve.
# The 'B' layer of our model corresponds to the Yellow dye ('y') sensitivity curve.
film_sens_R = interp1d([p.wavelength for p in spectral_data], [p.c for p in spectral_data], bounds_error=False, fill_value=0)(common_wavelengths)
film_sens_G = interp1d([p.wavelength for p in spectral_data], [p.m for p in spectral_data], bounds_error=False, fill_value=0)(common_wavelengths)
film_sens_B = interp1d([p.wavelength for p in spectral_data], [p.y for p in spectral_data], bounds_error=False, fill_value=0)(common_wavelengths)
film_spectral_sensitivities = np.stack([film_sens_R, film_sens_G, film_sens_B], axis=-1)
print(f"Film spectral sensitivities shape: {film_spectral_sensitivities.shape}")
# Align Reference Illuminant to our common shape
illuminant_aligned = ref_illuminant_spd.copy().align(common_shape)
# --- 2. Use Mallett (2019) to get spectral reflectance from sRGB input ---
# Get the three sRGB spectral primary basis functions
mallett_basis_sds = colour.recovery.MSDS_BASIS_FUNCTIONS_sRGB_MALLETT2019
mallett_basis_aligned = mallett_basis_sds.copy().align(common_shape)
print(f"Mallett basis shape: {mallett_basis_aligned.shape}")
# This is the core of Mallett's method: the reflectance spectrum of any sRGB color
# is a linear combination of these three basis spectra, weighted by the linear sRGB values.
# S(lambda) = R * Sr(lambda) + G * Sg(lambda) + B * Sb(lambda)
spectral_reflectance = np.einsum(
'...c, kc -> ...k', # c: sRGB channels, k: wavelengths
image_linear_srgb, mallett_basis_aligned.values
)
print(f"Spectral reflectance shape: {spectral_reflectance.shape}")
# --- 3. Calculate Scene Light and Film Exposure ---
# The light hitting the film is the spectral reflectance multiplied by the scene illuminant.
light_from_scene = spectral_reflectance * illuminant_aligned.values
print(f"Light from scene shape: {light_from_scene.shape}")
# The exposure on each film layer is the integral of scene light * layer sensitivity.
# The einsum here performs that multiplication and summation (integration) in one step.
film_exposure_values = np.einsum(
'...k, ks -> ...s', # k: wavelengths, s: film layers
light_from_scene, film_spectral_sensitivities
)
print(f"Film exposure values shape: {film_exposure_values.shape}")
# --- 4. Calibrate Exposure Level (The Right Way) ---
# We now need to anchor this exposure to the datasheet's middle_gray_logE.
# First, find the exposure produced by a perfect 18.4% gray card.
gray_srgb_linear = np.array([0.184, 0.184, 0.184])
gray_reflectance = np.einsum('c, kc -> k', gray_srgb_linear, mallett_basis_aligned.values)
gray_light = gray_reflectance * illuminant_aligned.values
exposure_18_gray_film = np.einsum('k, ks -> s', gray_light, film_spectral_sensitivities)
print(f"Exposure for 18.4% gray card shape: {exposure_18_gray_film.shape}")
# The datasheet says that a middle gray exposure should result in a log value of -1.44 (for Portra).
# Our "green" channel is the usual reference for overall brightness.
# Let's find out what log exposure our 18.4% gray card *actually* produced on the green layer.
log_exposure_of_gray_on_green_layer = np.log10(exposure_18_gray_film[1] + EPSILON)
# Now, calculate the difference (the "shift") needed to match the datasheet.
log_shift = middle_gray_logE - log_exposure_of_gray_on_green_layer
print(f"Log shift to match middle gray: {log_shift:.3f}")
# --- 5. Final Conversion to Log Exposure ---
# Apply this single brightness shift to all three channels. This preserves the
# film's inherent color balance while correctly setting the exposure level.
log_exposure_rgb = np.log10(film_exposure_values + EPSILON) + log_shift
print(f"Log exposure RGB shape: {log_exposure_rgb.shape}")
return log_exposure_rgb
def main():
parser = argparse.ArgumentParser(
description="Simulate film stock color characteristics using a datasheet JSON."
@@ -650,10 +736,11 @@ def main():
print("Converting linear RGB to Log Exposure...")
middle_gray_logE = float(datasheet.properties.calibration.middle_gray_logE)
# Add epsilon inside log10 to handle pure black pixels
-log_exposure_rgb = middle_gray_logE + np.log10(image_linear / 0.18 + EPSILON)
-# Note: Values below 0.18 * 10**(hd_data['LogE'][0] - middle_gray_logE)
-# or above 0.18 * 10**(hd_data['LogE'][-1] - middle_gray_logE)
-# will map outside the H&D curve's defined LogE range and rely on clamping/extrapolation.
+log_exposure_rgb = calculate_film_log_exposure(image_linear,
+                                               datasheet.properties.curves.spectral_sensitivity,
+                                               SDS_ILLUMINANTS['D65'], # Use D65 as reference illuminant
+                                               middle_gray_logE, EPSILON)
# 2. Apply H&D Curves (Tonal Mapping + Balance Shifts + Gamma/Contrast)
print("Applying H&D curves...")

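For orientation, the new exposure path can be exercised on a single pixel outside the full pipeline. The sketch below is a minimal, hedged illustration of the same steps (Mallett 2019 basis reconstruction, D65 weighting, per-layer spectral integration, middle-gray anchoring); the flat sensitivity curves and the -1.44 anchor are placeholders, not datasheet values.

# Minimal single-pixel sketch of the spectral exposure path (assumes colour-science and numpy).
import numpy as np
import colour

shape = colour.SpectralShape(380, 780, 5)
basis = colour.recovery.MSDS_BASIS_FUNCTIONS_sRGB_MALLETT2019.copy().align(shape)
d65 = colour.SDS_ILLUMINANTS['D65'].copy().align(shape)

# Placeholder sensitivities: three flat curves standing in for real datasheet data.
sens = np.ones((len(shape.wavelengths), 3))

pixel = np.array([0.184, 0.184, 0.184])                      # linear sRGB middle gray
reflectance = np.einsum('c, kc -> k', pixel, basis.values)   # Mallett reconstruction
light = reflectance * d65.values                             # scene light reaching the film
exposure = np.einsum('k, ks -> s', light, sens)              # per-layer exposure (integration)

middle_gray_logE = -1.44                                     # placeholder anchor, e.g. the Portra value cited above
log_shift = middle_gray_logE - np.log10(exposure[1] + 1e-10)
log_exposure_rgb = np.log10(exposure + 1e-10) + log_shift
print(log_exposure_rgb)                                      # the green layer lands exactly on middle_gray_logE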
sim_data/ektar_100.json Normal file (+192 lines)

@@ -0,0 +1,192 @@
{
"info": {
"name": "Ektar 100",
"description": "KODAK PROFESSIONAL EKTAR 100 Film is the world's finest grain color negative film. With ISO 100 speed, high saturation and ultra-vivid color, this film offers the finest, smoothest grain of any color negative film available today. An ideal choice for commercial photographers and advanced amateurs, KODAK PROFESSIONAL EKTAR 100 Film is recommended for applications such as nature, travel and outdoor photography, as well as for fashion and product photography.",
"format_mm": 35,
"version": "1.0.0"
},
"processing": {
"gamma": {
"r_factor": 1.0,
"g_factor": 1.0,
"b_factor": 1.0
},
"balance": {
"r_shift": 0.0,
"g_shift": 0.0,
"b_shift": 0.0
}
},
"properties": {
"calibration": {
"iso": 100,
"middle_gray_logh": -0.84
},
"halation": {
"strength": {
"r": 0.015,
"g": 0.007,
"b": 0.002
},
"size_um": {
"r": 200.0,
"g": 100.0,
"b": 50.0
}
},
"couplers": {
"amount": 1.0,
"diffusion_um": 5.0
},
"interlayer": {
"diffusion_um": 2.1
},
"curves": {
"hd": [
{"d":-2.823,"r":0.21,"g":0.636,"b":0.844},
{"d":-2.664,"r":0.211,"g":0.649,"b":0.876},
{"d":-2.508,"r":0.232,"g":0.652,"b":0.871},
{"d":-2.35,"r":0.235,"g":0.656,"b":0.883},
{"d":-2.196,"r":0.256,"g":0.672,"b":0.927},
{"d":-2.045,"r":0.295,"g":0.713,"b":0.998},
{"d":-1.902,"r":0.349,"g":0.77,"b":1.087},
{"d":-1.762,"r":0.408,"g":0.84,"b":1.17},
{"d":-1.63,"r":0.479,"g":0.919,"b":1.246},
{"d":-1.499,"r":0.551,"g":0.992,"b":1.353},
{"d":-1.368,"r":0.622,"g":1.065,"b":1.44},
{"d":-1.242,"r":0.699,"g":1.141,"b":1.525},
{"d":-1.116,"r":0.776,"g":1.217,"b":1.61},
{"d":-0.991,"r":0.854,"g":1.293,"b":1.695},
{"d":-0.863,"r":0.929,"g":1.368,"b":1.784},
{"d":-0.735,"r":1.004,"g":1.443,"b":1.875},
{"d":-0.606,"r":1.078,"g":1.517,"b":1.965},
{"d":-0.476,"r":1.151,"g":1.594,"b":2.054},
{"d":-0.345,"r":1.223,"g":1.67,"b":2.143},
{"d":-0.215,"r":1.295,"g":1.747,"b":2.233},
{"d":-0.082,"r":1.365,"g":1.821,"b":2.315},
{"d":0.05,"r":1.435,"g":1.894,"b":2.397},
{"d":0.183,"r":1.504,"g":1.963,"b":2.479},
{"d":0.315,"r":1.575,"g":2.03,"b":2.56},
{"d":0.447,"r":1.645,"g":2.097,"b":2.642},
{"d":0.58,"r":1.714,"g":2.164,"b":2.725},
{"d":0.719,"r":1.774,"g":2.232,"b":2.81},
{"d":0.866,"r":1.822,"g":2.299,"b":2.899},
{"d":1.003,"r":1.886,"g":2.362,"b":2.988},
{"d":1.141,"r":1.949,"g":2.422,"b":3.076}
],
"spectral_sensitivity" : [
{"wavelength":379.858,"y":1.087,"m":0,"c":0},
{"wavelength":380.734,"y":1.157,"m":0,"c":0},
{"wavelength":382.979,"y":1.224,"m":0,"c":0},
{"wavelength":384.847,"y":1.293,"m":0,"c":0},
{"wavelength":386.574,"y":1.361,"m":0,"c":0},
{"wavelength":388.3,"y":1.43,"m":0,"c":0},
{"wavelength":389.049,"y":1.499,"m":0,"c":0},
{"wavelength":389.871,"y":1.568,"m":0,"c":0},
{"wavelength":391.745,"y":1.636,"m":0,"c":0},
{"wavelength":393.2,"y":1.705,"m":0,"c":0},
{"wavelength":395.008,"y":1.773,"m":0.828,"c":0},
{"wavelength":397.35,"y":1.84,"m":0.898,"c":0},
{"wavelength":399.697,"y":1.907,"m":0.9275,"c":0},
{"wavelength":402.647,"y":1.973,"m":0.957,"c":0},
{"wavelength":407.065,"y":2.032,"m":0.917,"c":0},
{"wavelength":415.314,"y":2.033,"m":0.858,"c":0},
{"wavelength":423.059,"y":2.027,"m":0.802,"c":0},
{"wavelength":431.019,"y":2.056,"m":0.738,"c":0},
{"wavelength":438.757,"y":2.089,"m":0.676,"c":0},
{"wavelength":445.108,"y":2.05,"m":0.627,"c":0},
{"wavelength":453.57,"y":2.039,"m":0.617,"c":0},
{"wavelength":460.523,"y":2.077,"m":0.616,"c":0},
{"wavelength":466.748,"y":2.127,"m":0.634,"c":0},
{"wavelength":472.11,"y":2.153,"m":0.704,"c":0},
{"wavelength":476.647,"y":2.095,"m":0.779,"c":0},
{"wavelength":478.713,"y":2.028,"m":0.856,"c":0},
{"wavelength":479.685,"y":1.959,"m":0.934,"c":0},
{"wavelength":480.301,"y":1.889,"m":0.934,"c":0},
{"wavelength":480.71,"y":1.819,"m":0.934,"c":0},
{"wavelength":481.119,"y":1.749,"m":0.974,"c":0},
{"wavelength":481.528,"y":1.68,"m":1.01,"c":0},
{"wavelength":482.053,"y":1.61,"m":1.01,"c":0},
{"wavelength":483.297,"y":1.541,"m":1.087,"c":0},
{"wavelength":484.542,"y":1.472,"m":1.163,"c":0},
{"wavelength":485.787,"y":1.402,"m":1.163,"c":0},
{"wavelength":486.582,"y":1.333,"m":1.163,"c":0},
{"wavelength":486.82,"y":1.263,"m":1.163,"c":0},
{"wavelength":487.058,"y":1.193,"m":1.163,"c":0},
{"wavelength":487.296,"y":1.123,"m":1.163,"c":0},
{"wavelength":487.7,"y":1.054,"m":1.163,"c":0},
{"wavelength":488.225,"y":0.984,"m":1.237,"c":0},
{"wavelength":488.75,"y":0.914,"m":1.237,"c":0},
{"wavelength":489.274,"y":0.844,"m":1.237,"c":0},
{"wavelength":489.799,"y":0.775,"m":1.237,"c":0},
{"wavelength":490.324,"y":0.705,"m":1.237,"c":0},
{"wavelength":490.848,"y":0.635,"m":1.237,"c":0},
{"wavelength":491.662,"y":0.566,"m":1.237,"c":0},
{"wavelength":492.691,"y":0.496,"m":1.306,"c":0},
{"wavelength":493.719,"y":0.427,"m":1.306,"c":0},
{"wavelength":494.748,"y":0.357,"m":1.306,"c":0},
{"wavelength":501.114,"y":0,"m":1.367,"c":0},
{"wavelength":505.439,"y":0,"m":1.436,"c":0},
{"wavelength":513.404,"y":0,"m":1.481,"c":0},
{"wavelength":517.811,"y":0,"m":1.551,"c":0},
{"wavelength":523.208,"y":0,"m":1.616,"c":0},
{"wavelength":528.813,"y":0,"m":1.68,"c":0},
{"wavelength":534.368,"y":0,"m":1.745,"c":0},
{"wavelength":540.255,"y":0,"m":1.807,"c":0},
{"wavelength":548.653,"y":0,"m":1.813,"c":0},
{"wavelength":556.798,"y":0,"m":1.776,"c":0.196},
{"wavelength":564.155,"y":0,"m":1.729,"c":0.273},
{"wavelength":568.63,"y":0,"m":1.665,"c":0.34},
{"wavelength":574.823,"y":0,"m":1.618,"c":0.389},
{"wavelength":576.314,"y":0,"m":1.541,"c":0.469},
{"wavelength":577.206,"y":0,"m":1.463,"c":0.549},
{"wavelength":579.67,"y":0,"m":1.388,"c":0.63},
{"wavelength":580.747,"y":0,"m":1.31,"c":0.71},
{"wavelength":581.382,"y":0,"m":1.232,"c":0.791},
{"wavelength":582.016,"y":0,"m":1.154,"c":0.872},
{"wavelength":582.763,"y":0,"m":1.076,"c":0.952},
{"wavelength":583.519,"y":0,"m":0.998,"c":1.032},
{"wavelength":584.385,"y":0,"m":0.92,"c":1.113},
{"wavelength":585.366,"y":0,"m":0.842,"c":1.193},
{"wavelength":586.078,"y":0,"m":0.764,"c":1.273},
{"wavelength":586.625,"y":0,"m":0.686,"c":1.354},
{"wavelength":587.879,"y":0,"m":0.609,"c":1.433},
{"wavelength":588.88,"y":0,"m":0.531,"c":1.433},
{"wavelength":589.608,"y":0,"m":0.453,"c":1.51},
{"wavelength":590.336,"y":0,"m":0.375,"c":1.51},
{"wavelength":594.453,"y":0,"m":0,"c":1.587},
{"wavelength":598.504,"y":0,"m":0,"c":1.661},
{"wavelength":603.379,"y":0,"m":0,"c":1.732},
{"wavelength":612.446,"y":0,"m":0,"c":1.762},
{"wavelength":621.674,"y":0,"m":0,"c":1.797},
{"wavelength":629.773,"y":0,"m":0,"c":1.845},
{"wavelength":636.679,"y":0,"m":0,"c":1.905},
{"wavelength":643.174,"y":0,"m":0,"c":1.968},
{"wavelength":649.559,"y":0,"m":0,"c":2.031},
{"wavelength":654.536,"y":0,"m":0,"c":2.026},
{"wavelength":658.974,"y":0,"m":0,"c":1.954},
{"wavelength":663.966,"y":0,"m":0,"c":1.885},
{"wavelength":664.694,"y":0,"m":0,"c":1.804},
{"wavelength":666.539,"y":0,"m":0,"c":1.724},
{"wavelength":668.583,"y":0,"m":0,"c":1.644},
{"wavelength":670.668,"y":0,"m":0,"c":1.565},
{"wavelength":672.585,"y":0,"m":0,"c":1.485},
{"wavelength":674.101,"y":0,"m":0,"c":1.405},
{"wavelength":675.881,"y":0,"m":0,"c":1.325},
{"wavelength":677.952,"y":0,"m":0,"c":1.246},
{"wavelength":680.023,"y":0,"m":0,"c":1.166},
{"wavelength":682.496,"y":0,"m":0,"c":1.088},
{"wavelength":685.061,"y":0,"m":0,"c":1.009},
{"wavelength":687.356,"y":0,"m":0,"c":0.93},
{"wavelength":689.036,"y":0,"m":0,"c":0.85},
{"wavelength":690.741,"y":0,"m":0,"c":0.77},
{"wavelength":692.624,"y":0,"m":0,"c":0.69},
{"wavelength":694.105,"y":0,"m":0,"c":0.61},
{"wavelength":695.373,"y":0,"m":0,"c":0.53},
{"wavelength":696.458,"y":0,"m":0,"c":0.449},
{"wavelength":697.344,"y":0,"m":0,"c":0.368},
{"wavelength":696.035,"y":0,"m":0,"c":0.291}
]
}
}
}
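
The spectral_sensitivity table above is the data that feeds the interp1d resampling in calculate_film_log_exposure. As a small sketch (the file path and raw-dict access are assumptions; the real script parses these points into SpectralSensitivityCurvePoint objects), it can be loaded and resampled onto the common 380-780 nm, 5 nm grid like this:

# Sketch: resample this datasheet's spectral_sensitivity onto the common 5 nm grid.
import json
import numpy as np
from scipy.interpolate import interp1d

with open("sim_data/ektar_100.json") as f:  # path as added in this commit
    datasheet = json.load(f)

points = datasheet["properties"]["curves"]["spectral_sensitivity"]
wavelengths = np.array([p["wavelength"] for p in points])
grid = np.arange(380, 781, 5)  # matches colour.SpectralShape(380, 780, 5)

# Film layer R uses the cyan-dye curve 'c', G the magenta 'm', B the yellow 'y',
# mirroring the layer-to-dye mapping fixed in calculate_film_log_exposure.
sens = np.stack(
    [interp1d(wavelengths, [p[k] for p in points],
              bounds_error=False, fill_value=0)(grid) for k in ("c", "m", "y")],
    axis=-1,
)
print(sens.shape)  # (81, 3): wavelengths x film layers (R, G, B)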