Fix so it's possible to get multiple reports per day

Also translate comments to English
Jeena 2025-08-11 22:41:08 +09:00
parent 76676839f0
commit 47349e68cd


#!/usr/bin/env python3
#thanks to goodhobak from https://www.clien.net/
# requires: requests Pillow piexif python-dotenv
# usage:
#   1. get the JSON from the API in the web page
#   2. save it as report.json
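#
# A minimal sketch of what the script expects in report.json, inferred from the
# fields used below (all values are illustrative, not real API output):
#   {"results": [{"id": 123, "created": "2025-07-12T05:32:45.000000Z",
#                 "class_name": "...", "child_name": "...", "weather": "...",
#                 "content": "...", "location": "37.5665, 126.9780",
#                 "attached_video": null,
#                 "attached_images": [{"original": "https://..."}]}]}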
import datetime
import json
import os
import sys
from pathlib import Path

import piexif
import requests
from PIL import Image
from dotenv import load_dotenv

load_dotenv()
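
# Destination root for the downloaded reports, configured via KIDSNOTE_STORAGE_PATH in .env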
STORAGE_PATH = os.getenv("KIDSNOTE_STORAGE_PATH")
if not STORAGE_PATH:
print("Error: KIDSNOTE_STORAGE_PATH is not set.")
sys.exit(1)
def convert_to_degrees(value):
"""Convert decimal coordinate to degrees, minutes, and seconds tuple."""
degrees = int(value)
minutes = int((value - degrees) * 60)
seconds = (value - degrees - minutes / 60) * 3600
seconds_numerator = int(seconds * 100)
seconds_denominator = 100
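    # EXIF stores coordinates as rationals, so keep seconds with 1/100 precision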
    return ((degrees, 1), (minutes, 1), (seconds_numerator, seconds_denominator))

def add_exif_data(image_path, title, content, location_str=None):
    """Embed the report title/content and, when given, GPS coordinates into the image's EXIF data."""
try:
img = Image.open(image_path)
# Check if the image has 'exif' data
if 'exif' in img.info:
exif_dict = piexif.load(img.info['exif'])
else:
exif_dict = {"0th": {}, "Exif": {}, "GPS": {}, "1st": {}, "thumbnail": None}
# GPS coordinates
        if location_str:
            try:
                latitude, longitude = map(float, location_str.split(','))
                exif_dict['GPS'][piexif.GPSIFD.GPSLatitudeRef] = 'N' if latitude >= 0 else 'S'
                exif_dict['GPS'][piexif.GPSIFD.GPSLongitudeRef] = 'E' if longitude >= 0 else 'W'
                exif_dict['GPS'][piexif.GPSIFD.GPSLatitude] = convert_to_degrees(abs(latitude))
                exif_dict['GPS'][piexif.GPSIFD.GPSLongitude] = convert_to_degrees(abs(longitude))
            except Exception as e:
                print(f"Invalid location format '{location_str}': {e}")
# User comment
        comment = f"Title: {title}\nContent: {content}"
        exif_dict['0th'][piexif.ImageIFD.ImageDescription] = comment.encode('utf-8')
        exif_dict['Exif'][piexif.ExifIFD.UserComment] = b'ASCII\x00\x00\x00' + comment.encode('utf-8')
# Convert EXIF data to bytes and save the image
exif_bytes = piexif.dump(exif_dict)
img.save(image_path, exif=exif_bytes)
# Close the image
img.close()
except Exception as e:
print(f"EXIF error for {image_path}: {e}")
def get_creation_datetime(image_path):
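    """Return the image's EXIF DateTimeOriginal as a datetime, or None if it is missing or unreadable."""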
try:
img = Image.open(image_path)
exif_bytes = img.info.get('exif')
if not exif_bytes:
return None
exif_dict = piexif.load(exif_bytes)
dt_bytes = exif_dict['Exif'].get(piexif.ExifIFD.DateTimeOriginal)
if not dt_bytes:
return None
dt_str = dt_bytes.decode('utf-8') # e.g. "2025:07:12 14:32:45"
dt = datetime.datetime.strptime(dt_str, "%Y:%m:%d %H:%M:%S")
return dt
except Exception:
return None

# Read the report JSON file
with open("report.json", "r", encoding='utf-8') as file:
data = json.load(file)

# Group reports by date
reports_by_date = {}
for report in data['results']:
    date = datetime.datetime.strptime(report['created'], "%Y-%m-%dT%H:%M:%S.%fZ").date()
    reports_by_date.setdefault(date, []).append(report)

# Create one folder per day and number that day's reports
for date, reports in sorted(reports_by_date.items()):
    folder = Path(STORAGE_PATH).expanduser() / f"{date.year:04d}" / f"{date.month:02d}" / f"{date.day:02d}"
    folder.mkdir(parents=True, exist_ok=True)
    for report_index, report in enumerate(reports, start=1):
        # Save the report description, skipping reports that were already downloaded
        desc_path = folder / f"report-{report_index}-description.txt"
        if desc_path.exists():
            print(f"'{desc_path.name}' exists")
            continue
        with open(desc_path, "w", encoding='utf-8') as f:
            f.write(f"Title: {report['class_name']}-{report['child_name']}\n")
            f.write(f"Weather: {report['weather']}\n")
            f.write(f"Content: {report['content']}\n")
        # Download the attached video, if any
        if report.get('attached_video'):
            try:
                r = requests.get(report['attached_video'])
                r.raise_for_status()
                with open(folder / f"V_{report_index}.MP4", "wb") as f:
                    f.write(r.content)
            except requests.RequestException as e:
                print(f"Video download failed: {e}")
for img_index, image in enumerate(report.get('attached_images', []), start=1):
try:
r = requests.get(image['original'])
r.raise_for_status()
temp_path = folder / f"temp_{report_index}_{img_index}.jpg"
with open(temp_path, "wb") as f:
f.write(r.content)
add_exif_data(temp_path, report['child_name'], report['content'], report.get('location'))
dt = get_creation_datetime(temp_path)
if dt:
date_str = dt.strftime("%Y-%m-%d_%H-%M")
else:
date_str = date.strftime("%Y-%m-%d")
final_name = f"P{report_index}_{date_str}.jpg"
final_path = folder / final_name
temp_path.rename(final_path)
except requests.RequestException as e:
print(f"Image download failed: {e}")
print(f"Report {report_index} (ID: {report['id']}) saved in '{folder}'")
print("All reports have been processed.")