commit ae804aaa42
parent 6335751f0f

.gitea/workflows/docker-publish.yml (new file, 42 lines)
@@ -0,0 +1,42 @@
name: Docker Build and Publish

on:
  push:
    branches: [ main ]
    tags: [ 'v*' ]
  pull_request:
    branches: [ main ]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: git.dws.rip/${{ github.repository }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha

      - name: Login to Gitea Container Registry
        uses: docker/login-action@v2
        with:
          registry: git.dws.rip
          username: ${{ github.actor }}
          password: ${{ secrets.GITEA_TOKEN }}

      - name: Build and push
        uses: docker/build-push-action@v4
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

Dockerfile (new file, 28 lines)
@@ -0,0 +1,28 @@
FROM python:3.12-slim

# Install system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Create non-root user
RUN useradd -m -r -s /bin/bash appuser

# Create necessary directories
WORKDIR /app
RUN mkdir -p /app/uploads /app/thumbnails
RUN chown -R appuser:appuser /app

# Install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt gunicorn

# Copy application files
COPY --chown=appuser:appuser templates /app/templates
COPY --chown=appuser:appuser app.py config.py models.py steganography.py ./

# Switch to non-root user
USER appuser

# Use gunicorn for production
CMD ["gunicorn", "--bind", "0.0.0.0:5000", "--workers", "4", "--timeout", "120", "--access-logfile", "-", "--error-logfile", "-", "app:app"]

README.md (new file, 117 lines)
@@ -0,0 +1,117 @@
# Spectra

> A variation on the masonry grid image gallery with the row alignment constraint removed. Oh, it also has an admin interface so you can set it up and forget it.

## Features

- **Color Analysis**: Automatically extracts color palettes from images to create cohesive galleries (see the sketch below)
- **Smart Thumbnails**: Generates and caches responsive thumbnails in multiple sizes
- **EXIF Preservation**: Maintains all photo metadata through processing
- **Ownership Verification**: Embeds steganographic proofs in images
- **Live Configuration**: Hot-reload config changes without restarts
- **Production Ready**: Fully Dockerized with Traefik integration
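
For reference, a minimal sketch of that color-analysis step. It mirrors `get_highlight_color` from the application code; `sample.jpg` is a placeholder path and the printed value is illustrative:

```python
import colorsys
from colorthief import ColorThief

def get_highlight_color(image_path):
    # Sample a small palette, then keep the most saturated swatch (HSV saturation)
    palette = ColorThief(image_path).get_palette(color_count=6, quality=1)
    r, g, b = max(palette, key=lambda rgb: colorsys.rgb_to_hsv(*rgb)[1])
    return f"#{r:02x}{g:02x}{b:02x}"

print(get_highlight_color("sample.jpg"))  # e.g. "#c8451f"
```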

## Quick Start

### Local Development

Clone the repository

```bash
git clone https://git.dws.rip/your-username/spectra
cd spectra
```

Set up Python virtual environment

```bash
python -m venv venv
source venv/bin/activate  # or venv\Scripts\activate on Windows
```

Install dependencies

```bash
pip install -r requirements.txt
```

Create config from template

```bash
cp config.example.toml config.toml
```

Run development server

```bash
python app.py
```

### Production Deployment

Create required network

```bash
docker network create traefik-public
```

Configure your domain

```bash
sed -i 's/photos.dws.rip/your.domain.here/g' docker-compose.yml
```

Launch

```bash
docker-compose up -d
```

## Configuration

### Essential Settings

```toml
[server]
host = "0.0.0.0"
port = 5000

[security]
max_upload_size_mb = 80
rate_limit = 100  # requests per minute

[admin]
password = "change-this-password"  # Required
```

See `config.example.toml` for all available options.

## Directory Structure

```
spectra/
├── app.py             # Application entry point
├── config.py          # Configuration management
├── models.py          # Database models
├── steganography.py   # Image verification
├── templates/         # Jinja2 templates
├── uploads/           # Original images
└── thumbnails/        # Generated thumbnails
```

## API Reference

### Endpoints

#### Public Endpoints

- `GET /` - Main gallery view
- `GET /api/images` - Get paginated image list
- `GET /verify/<filename>` - Verify image authenticity

#### Admin Endpoints

- `POST /admin/login` - Admin authentication
- `POST /admin/upload` - Upload new images
- `POST /admin/update_photo/<id>` - Update image metadata
- `POST /admin/delete_photo/<id>` - Delete image
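
A minimal Python client sketch (standard library only) for the public endpoints. It assumes the gallery is reachable at `http://localhost:5000`, that `/api/images` takes a `page` query parameter and returns `images` and `hasMore` fields as in the application code, and that `example.jpg` stands in for a real uploaded filename:

```python
import json
import urllib.request

BASE = "http://localhost:5000"  # adjust for your deployment

# Walk the paginated gallery listing
page = 1
while True:
    with urllib.request.urlopen(f"{BASE}/api/images?page={page}") as resp:
        data = json.load(resp)
    for image in data["images"]:
        print(image)
    if not data["hasMore"]:
        break
    page += 1

# Check an image's embedded ownership proof (response shape may vary)
with urllib.request.urlopen(f"{BASE}/verify/example.jpg") as resp:
    print(json.load(resp))
```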

## Environment Variables

- `FLASK_ENV`: Set to 'production' in production
- `WORKERS`: Number of Gunicorn workers (default: 4)
- `PORT`: Override default port (default: 5000)

app.py (199 lines changed)
@@ -11,6 +11,15 @@ from colorthief import ColorThief
 import colorsys
 from steganography import embed_message, extract_message
 import hashlib
+from watchdog.observers import Observer
+from watchdog.events import FileSystemEventHandler
+import toml
+import threading
+import time
+import atexit
+from flask_limiter import Limiter
+from flask_limiter.util import get_remote_address
+import secrets
 
 app = Flask(__name__)
 app.secret_key = os.urandom(24)
@@ -32,6 +41,81 @@ app.config['MAX_CONTENT_LENGTH'] = 80 * 1024 * 1024  # 80MB limit
 scheduler = BackgroundScheduler()
 scheduler.start()
 
+DEFAULT_CONFIG = {
+    'server': {
+        'host': '0.0.0.0',
+        'port': 5000
+    },
+    'directories': {
+        'upload': 'uploads',
+        'thumbnail': 'thumbnails'
+    },
+    'admin': {
+        'password': 'changeme'  # Default password
+    },
+    'appearance': {
+        'accent_color': '#007bff'
+    }
+}
+
+def merge_configs(default, user):
+    """Recursively merge user config with default config"""
+    result = default.copy()
+    for key, value in user.items():
+        if key in result and isinstance(result[key], dict) and isinstance(value, dict):
+            result[key] = merge_configs(result[key], value)
+        else:
+            result[key] = value
+    return result
+
+class ConfigFileHandler(FileSystemEventHandler):
+    def on_modified(self, event):
+        if event.src_path.endswith('config.toml'):
+            global config
+            try:
+                new_config = load_or_create_config()
+                config.update(new_config)
+                app.logger.info("Configuration reloaded successfully")
+            except Exception as e:
+                app.logger.error(f"Error reloading configuration: {e}")
+
+def load_or_create_config():
+    config_path = 'config.toml'
+
+    try:
+        if os.path.exists(config_path):
+            with open(config_path, 'r') as f:
+                user_config = toml.load(f)
+        else:
+            user_config = {}
+
+        # Merge with defaults
+        final_config = merge_configs(DEFAULT_CONFIG, user_config)
+
+        # Save complete config back to file
+        with open(config_path, 'w') as f:
+            toml.dump(final_config, f)
+
+        return final_config
+
+    except Exception as e:
+        app.logger.error(f"Error loading config: {e}")
+        return DEFAULT_CONFIG.copy()
+
+def start_config_watcher():
+    observer = Observer()
+    observer.schedule(ConfigFileHandler(), path='.', recursive=False)
+    observer.start()
+
+    # Register cleanup on app shutdown
+    def cleanup():
+        observer.stop()
+        observer.join()
+
+    atexit.register(cleanup)
+
+start_config_watcher()
+
 def allowed_file(filename):
     return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
 
@@ -307,6 +391,121 @@ def verify_image(filename):
     except Exception as e:
         return jsonify({'verified': False, 'error': str(e)})
 
+limiter = Limiter(
+    app=app,
+    key_func=get_remote_address,
+    default_limits=["100 per minute"],
+    storage_uri="memory://"
+)
+
+# Generate a strong secret key at startup
+app.secret_key = secrets.token_hex(32)
+
+# Add security headers middleware
+@app.after_request
+def add_security_headers(response):
+    response.headers['X-Content-Type-Options'] = 'nosniff'
+    response.headers['X-Frame-Options'] = 'SAMEORIGIN'
+    response.headers['X-XSS-Protection'] = '1; mode=block'
+    response.headers['Strict-Transport-Security'] = 'max-age=31536000; includeSubDomains'
+    response.headers['Content-Security-Policy'] = "default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline';"
+    return response
+
+# Add rate limiting to sensitive endpoints
+@app.route('/admin/login', methods=['POST'])
+@limiter.limit("5 per minute")
+def admin_login():
+    if request.method == 'POST':
+        if request.form['password'] == config['admin']['password']:
+            session['logged_in'] = True
+            return redirect(url_for('admin'))
+        else:
+            flash('Invalid password')
+    return render_template('admin_login.html', accent_color=config['appearance']['accent_color'])
+
+@app.route('/admin/upload', methods=['POST'])
+@limiter.limit("10 per minute")
+def admin_upload():
+    if 'logged_in' not in session:
+        return redirect(url_for('admin_login'))
+
+    if 'file' not in request.files:
+        flash('No file part')
+        return redirect(url_for('admin'))
+
+    file = request.files['file']
+    if file.filename == '':
+        flash('No selected file')
+        return redirect(url_for('admin'))
+
+    if file and allowed_file(file.filename):
+        filename = secure_filename(file.filename)
+        file_path = os.path.join(app.config['UPLOAD_FOLDER'], filename)
+        file.save(file_path)
+
+        # Extract EXIF data
+        exif = None
+        exifraw = None
+        with Image.open(file_path) as img:
+            exifraw = img.info['exif']
+            width, height = img.size
+            exif = {
+                ExifTags.TAGS[k]: v
+                for k, v in img._getexif().items()
+                if k in ExifTags.TAGS
+            }
+
+        # Generate a unique key for the image
+        unique_key = hashlib.sha256(f"{filename}{datetime.now().isoformat()}".encode()).hexdigest()[:16]
+
+        # Embed the unique key into the image
+        try:
+            embed_message(file_path, unique_key, exifraw)
+        except ValueError as e:
+            flash(f"Error embedding key: {str(e)}")
+            os.remove(file_path)
+            return redirect(url_for('admin'))
+
+        # Generate thumbnails
+        generate_thumbnails(filename)
+
+        # Get image dimensions
+        with Image.open(file_path) as img:
+            width, height = img.size
+
+        exposure_time = exif['ExposureTime']
+        if isinstance(exposure_time, tuple):
+            exposure_fraction = f"{exposure_time[0]}/{exposure_time[1]}"
+        else:
+            exposure_fraction = f"1/{int(1/float(exposure_time))}"
+
+        # Create database entry
+        db_session = DBSession()
+        new_photo = Photo(
+            input_filename=filename,
+            thumbnail_filename=f"{os.path.splitext(filename)[0]}/256_{filename}",
+            focal_length=str(exif.get('FocalLengthIn35mmFilm', exif.get('FocalLength', ''))),
+            aperture=str(exif.get('FNumber', '')),
+            shutter_speed=exposure_fraction,
+            date_taken=datetime.strptime(str(exif.get('DateTime', '1970:01:01 00:00:00')), '%Y:%m:%d %H:%M:%S'),
+            iso=int(exif.get('ISOSpeedRatings', 0)),
+            orientation=int(exif.get('Orientation', 1)),
+            width=width,
+            height=height,
+            highlight_color=get_highlight_color(THUMBNAIL_FOLDER + f"/{os.path.splitext(filename)[0]}/256_{filename}"),
+            unique_key=unique_key
+        )
+        db_session.add(new_photo)
+        db_session.commit()
+        db_session.close()
+
+        flash('File uploaded successfully')
+        return redirect(url_for('admin'))
+
+    flash('Invalid file type')
+    return redirect(url_for('admin'))
+
 if __name__ == '__main__':
     app.run(
         debug=True,
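
The configuration handling added above layers a user-supplied `config.toml` over `DEFAULT_CONFIG` and writes the merged result back to disk. A standalone sketch of how `merge_configs` behaves; the function body is copied from the diff, while the sample dicts are illustrative:

```python
DEFAULT_CONFIG = {
    'server': {'host': '0.0.0.0', 'port': 5000},
    'admin': {'password': 'changeme'},
}

def merge_configs(default, user):
    """Recursively merge user config with default config"""
    result = default.copy()
    for key, value in user.items():
        if key in result and isinstance(result[key], dict) and isinstance(value, dict):
            result[key] = merge_configs(result[key], value)
        else:
            result[key] = value
    return result

# A partial user config only overrides the keys it names
user_config = {'admin': {'password': 's3cret'}}
print(merge_configs(DEFAULT_CONFIG, user_config))
# -> {'server': {'host': '0.0.0.0', 'port': 5000}, 'admin': {'password': 's3cret'}}
```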

config.example.toml (new file, 20 lines)
@@ -0,0 +1,20 @@
[server]
host = "0.0.0.0"
port = 5000

[directories]
upload = "/app/uploads"
thumbnail = "/app/thumbnails"

[admin]
# Change this password!
password = "changeme"

[appearance]
accent_color = "#007bff"

[security]
# Add these security settings
max_upload_size_mb = 80
allowed_extensions = ["jpg", "jpeg", "png", "gif"]
rate_limit = 100  # requests per minute

docker-compose.yml (new file, 34 lines)
@@ -0,0 +1,34 @@
version: '3.8'

services:
  web:
    build: .
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.photos.rule=Host(`photos.dws.rip`)"
      - "traefik.http.services.photos.loadbalancer.server.port=5000"
      - "traefik.http.routers.photos.tls=true"
      - "traefik.http.routers.photos.tls.certresolver=default"
    volumes:
      - ./uploads:/app/uploads
      - ./thumbnails:/app/thumbnails
      - ./config.toml:/app/config.toml
      - ./photos.db:/app/photos.db
    environment:
      - PYTHONUNBUFFERED=1
      - FLASK_ENV=production
      - WORKERS=4
    restart: unless-stopped
    networks:
      - traefik-public
      - default
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:5000/"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

networks:
  traefik-public:
    external: true

main.py (deleted, 145 lines)
@@ -1,145 +0,0 @@
from flask import Flask, jsonify, request, send_from_directory, render_template
import os
from PIL import Image, ExifTags
from apscheduler.schedulers.background import BackgroundScheduler
from datetime import datetime, timedelta
import piexif
import io
import random
from colorthief import ColorThief
import colorsys

app = Flask(__name__)

IMAGE_FOLDER = '/home/dubey/projects/photoportfolio/pythonserver/images/'
THUMBS_FOLDER = '/home/dubey/projects/photoportfolio/pythonserver/thumbs/'
IMAGES_PER_PAGE = 5
THUMBNAIL_SIZES = [256, 512, 768, 1024, 1536, 2048]

scheduler = BackgroundScheduler()
scheduler.start()

def generate_thumbnails():
    for filename in os.listdir(IMAGE_FOLDER):
        if filename.lower().endswith(('.png', '.jpg', '.jpeg', '.gif')):
            original_path = os.path.join(IMAGE_FOLDER, filename)
            for size in THUMBNAIL_SIZES:
                thumb_path = os.path.join(THUMBS_FOLDER, f"{size}_{filename}")
                if not os.path.exists(thumb_path):
                    with Image.open(original_path) as img:
                        # Extract EXIF data
                        exif_data = None
                        if "exif" in img.info:
                            exif_data = img.info["exif"]

                        # Resize image
                        img.thumbnail((size, size), Image.LANCZOS)

                        # Save image with EXIF data
                        if exif_data:
                            img.save(thumb_path, exif=exif_data, optimize=True, quality=85)
                        else:
                            img.save(thumb_path, optimize=True, quality=85)

scheduler.add_job(generate_thumbnails, 'interval', minutes=5)
scheduler.add_job(generate_thumbnails, 'date', run_date=datetime.now() + timedelta(seconds=1))  # Run once at startup


def get_highlight_color(image_path):
    color_thief = ColorThief(image_path)
    palette = color_thief.get_palette(color_count=6, quality=1)

    # Convert RGB to HSV and find the color with the highest saturation
    highlight_color = max(palette, key=lambda rgb: colorsys.rgb_to_hsv(*rgb)[1])

    return '#{:02x}{:02x}{:02x}'.format(*highlight_color)


def get_image_info(filename):
    path = os.path.join(IMAGE_FOLDER, filename)
    thumb_path = os.path.join(THUMBS_FOLDER, f"256_{filename}")
    exif = None
    with Image.open(path) as img:
        width, height = img.size
        exif = {
            ExifTags.TAGS[k]: v
            for k, v in img._getexif().items()
            if k in ExifTags.TAGS
        }

    if str(exif['Orientation']) == "6" or str(exif['Orientation']) == "8":
        width, height = height, width

    exposure_time = exif['ExposureTime']
    if isinstance(exposure_time, tuple):
        exposure_fraction = f"{exposure_time[0]}/{exposure_time[1]}"
    else:
        exposure_fraction = f"1/{int(1/float(exposure_time))}"

    date_obj = datetime.strptime(exif['DateTime'], '%Y:%m:%d %H:%M:%S')
    date = date_obj.strftime('%y %m %d')  # Format: YY MM DD
    technical_info = f"{exif['FocalLengthIn35mmFilm']}MM | F/{exif['FNumber']} | {exposure_fraction} | ISO{exif['ISOSpeedRatings']}"

    factor = random.randint(2, 3)
    if height < 4000 or width < 4000:
        factor = 1

    highlight_color = get_highlight_color(thumb_path)

    return {
        'imgSrc': f'/thumbs/1536_{filename}',
        'fullSizeImgSrc': f'/images/{filename}',
        'date': date,
        'technicalInfo': technical_info,
        'width': width/factor,
        'height': height/factor,
        'highlightColor': highlight_color
    }


def get_image_taken_date(filename):
    path = os.path.join(IMAGE_FOLDER, filename)
    with Image.open(path) as img:
        exif = {
            ExifTags.TAGS[k]: v
            for k, v in img._getexif().items()
            if k in ExifTags.TAGS
        }
    date_str = exif.get('DateTime', exif.get('DateTimeOriginal', ''))
    if date_str:
        return datetime.strptime(date_str, '%Y:%m:%d %H:%M:%S')
    return datetime.fromtimestamp(os.path.getmtime(path))  # Fallback to file modification time


@app.route('/api/images')
def get_images():
    page = int(request.args.get('page', 1))
    all_images = sorted(
        [f for f in os.listdir(IMAGE_FOLDER) if f.lower().endswith(('.png', '.jpg', '.jpeg', '.gif'))],
        key=get_image_taken_date,
        reverse=True
    )
    start = (page - 1) * IMAGES_PER_PAGE
    end = start + IMAGES_PER_PAGE
    page_images = all_images[start:end]

    return jsonify({
        'images': [get_image_info(img) for img in page_images],
        'hasMore': end < len(all_images)
    })


@app.route('/images/<path:filename>')
def serve_image(filename):
    return send_from_directory(IMAGE_FOLDER, filename)


@app.route('/')
def index():
    return render_template('index.html')


@app.route('/thumbs/<path:filename>')
def serve_thumbnail(filename):
    return send_from_directory(THUMBS_FOLDER, filename)


if __name__ == '__main__':
    app.run(debug=True, port=5001, host='0.0.0.0')

requirements.txt (new file, 23 lines)
@@ -0,0 +1,23 @@
APScheduler==3.10.4
blinker==1.8.2
click==8.1.7
colorthief==0.2.1
exif==1.6.0
Flask==3.0.3
greenlet==3.1.1
itsdangerous==2.2.0
Jinja2==3.1.4
MarkupSafe==3.0.1
numpy==2.1.2
piexif==1.1.3
pillow==11.0.0
plum-py==0.8.7
pytz==2024.2
six==1.16.0
SQLAlchemy==2.0.36
toml==0.10.2
typing_extensions==4.12.2
tzlocal==5.2
watchdog==6.0.0
Werkzeug==3.0.4
gunicorn