Compare commits
3 commits
c6936f8f52
...
0ada2d4858
| Author | SHA1 | Date | |
|---|---|---|---|
| 0ada2d4858 | |||
| d9a45bd317 | |||
| ef8672678a |
8 changed files with 714 additions and 793 deletions
2
Pipfile
2
Pipfile
|
|
@ -16,4 +16,4 @@ selenium-wire = "*"
|
|||
[dev-packages]
|
||||
|
||||
[requires]
|
||||
python_version = "3.13"
|
||||
python_version = "3"
|
||||
|
|
|
|||
1274
Pipfile.lock
generated
1274
Pipfile.lock
generated
File diff suppressed because it is too large
Load diff
|
|
@ -1,183 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
|
||||
#thanks to goodhobak from https://www.clien.net/
|
||||
#require: requests Pillow piexif
|
||||
#use:
|
||||
#1. get json from api in webpage
|
||||
#2. save json
|
||||
#commit : 파일의 생성일, 수정일 수정
|
||||
import json
|
||||
import os
|
||||
import requests
|
||||
from PIL import Image
|
||||
import piexif
|
||||
from datetime import datetime, timedelta
|
||||
from xml.etree import ElementTree as ET
|
||||
from datetime import datetime
|
||||
import os
|
||||
import pywintypes, win32file, win32con
|
||||
|
||||
def convert_to_degrees(value):
    """Convert a decimal coordinate into an EXIF-style DMS rational tuple.

    Returns ((degrees, 1), (minutes, 1), (seconds*100, 100)) — each
    component as a (numerator, denominator) pair as piexif GPS tags expect.
    """
    deg = int(value)
    remainder = value - deg
    mins = int(remainder * 60)
    # Remaining fraction after whole minutes, expressed in seconds with
    # two decimal places of precision (denominator fixed at 100).
    secs = (remainder - mins / 60) * 3600
    return ((deg, 1), (mins, 1), (int(secs * 100), 100))
|
||||
|
||||
def add_exif_data(image_path, title, content, modified_time):
    """Stamp EXIF date/time fields and a title/content description onto a JPEG.

    Leaves the file untouched when it already carries a DateTimeOriginal
    different from ``modified_time`` (treated as a timestamp worth keeping).

    Args:
        image_path: Path to the JPEG file, rewritten in place.
        title: Album title embedded in ImageDescription.
        content: Album body text embedded alongside the title.
        modified_time: datetime written to DateTimeOriginal/Digitized/DateTime.
    """
    img = Image.open(image_path)

    # Start from the file's existing EXIF block if present, otherwise build
    # an empty piexif structure.
    if 'exif' in img.info:
        exif_dict = piexif.load(img.info['exif'])
    else:
        exif_dict = {"0th": {}, "Exif": {}, "GPS": {}, "1st": {}, "thumbnail": None}

    # EXIF ASCII tags round-trip through piexif as bytes; encode once so the
    # comparison below is bytes-vs-bytes. (The original compared bytes to a
    # str, which is never equal in Python 3, so every image that already had
    # a DateTimeOriginal was skipped regardless of its value.)
    formatted_time = modified_time.strftime("%Y:%m:%d %H:%M:%S").encode("ascii")

    # Skip images whose DateTimeOriginal is already set to something else.
    existing_date_time_original = exif_dict['Exif'].get(piexif.ExifIFD.DateTimeOriginal, None)
    if existing_date_time_original and existing_date_time_original != formatted_time:
        img.close()  # Close the image and skip further processing
        return

    # Update all three datetime fields to the album's modified time.
    exif_dict['Exif'][piexif.ExifIFD.DateTimeOriginal] = formatted_time
    exif_dict['Exif'][piexif.ExifIFD.DateTimeDigitized] = formatted_time
    exif_dict['0th'][piexif.ImageIFD.DateTime] = formatted_time

    # Embed title/content, but never overwrite an existing description.
    user_comment = f"Title: {title}\nContent: {content}"
    encoded_comment = user_comment.encode('utf-8')
    exif_dict['0th'].setdefault(piexif.ImageIFD.ImageDescription, encoded_comment)

    # Serialize the EXIF structure and rewrite the file in place.
    exif_bytes = piexif.dump(exif_dict)
    img.save(image_path, exif=exif_bytes)

    img.close()
|
||||
|
||||
|
||||
def add_xmp_data(image_path, title, content, modified_time):
    """Embed an XMP packet (title, description, create/modify dates) into a JPEG.

    Rewrites the file in place, preserving its existing EXIF block.
    """
    # Open the image
    img = Image.open(image_path)

    # Create XMP data: an RDF/XML packet with Dublin Core title/description
    # and XMP create/modify dates.
    # NOTE(review): title/content are interpolated unescaped — XML-special
    # characters (&, <, >) in them would yield a malformed packet; confirm
    # the inputs are plain text.
    xmp_template = f"""
<x:xmpmeta xmlns:x="adobe:ns:meta/">
  <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
    <rdf:Description rdf:about=""
      xmlns:dc="http://purl.org/dc/elements/1.1/"
      xmlns:xmp="http://ns.adobe.com/xap/1.0/">
      <dc:title>{title}</dc:title>
      <dc:description>{content}</dc:description>
      <xmp:ModifyDate>{modified_time.strftime("%Y-%m-%dT%H:%M:%S")}</xmp:ModifyDate>
      <xmp:CreateDate>{modified_time.strftime("%Y-%m-%dT%H:%M:%S")}</xmp:CreateDate>
    </rdf:Description>
  </rdf:RDF>
</x:xmpmeta>
"""
    xmp_data = bytes(xmp_template, 'utf-8')

    # Check if the image has existing XMP data and append the new data.
    # NOTE(review): Pillow does not normally expose a raw "APP1" key in
    # img.info for JPEGs — this branch may never trigger in practice; verify
    # against the Pillow version in use.
    if "APP1" in img.info and b'http://ns.adobe.com/xap/1.0/' in img.info["APP1"]:
        existing_xmp_index = img.info["APP1"].find(b'http://ns.adobe.com/xap/1.0/')
        if existing_xmp_index != -1:
            # Extract existing XMP data and create a combined XMP
            existing_xmp_data = img.info["APP1"][existing_xmp_index:]
            combined_xmp_data = existing_xmp_data.strip(b' ') + b' ' + xmp_data
        else:
            combined_xmp_data = xmp_data
    else:
        combined_xmp_data = xmp_data

    # Save the image with new metadata, carrying over any existing EXIF.
    img.save(image_path, "jpeg", exif=img.info.get('exif'), xmp=combined_xmp_data)

    # Close the image
    img.close()
|
||||
|
||||
def change_file_times(filename, created_time, modified_time):
    """Set a file's Windows creation and last-write times via pywin32.

    The last-access time is left unchanged (the None passed to SetFileTime).
    Windows-only: relies on pywintypes/win32file/win32con.
    """
    ctime = pywintypes.Time(created_time)
    mtime = pywintypes.Time(modified_time)

    # Open an existing file with write access to obtain a handle that
    # SetFileTime can operate on.
    handle = win32file.CreateFile(filename, win32con.GENERIC_WRITE, 0, None, win32con.OPEN_EXISTING, 0, None)
    # Argument order is (handle, creation, last-access, last-write).
    win32file.SetFileTime(handle, ctime, None, mtime)
    handle.Close()
|
||||
|
||||
|
||||
# -- Main script -------------------------------------------------------------
# Reads album.json (saved from the KidsNote API), then for each album:
# creates a dated folder, writes a description file, downloads the attached
# video and images, back-dates the files, and stamps image metadata.

# Read the exported album list.
with open("album.json", "r", encoding='utf-8') as album_file:
    data = json.load(album_file)

# Process each album.
for album in data['results']:
    # Folder layout: Album/Album-YYYY-MM/YYYY-MM-DD
    date_str = album['created'][:10]   # "YYYY-MM-DD"
    month_str = album['created'][:7]   # "YYYY-MM"

    # os.path.join instead of a hand-built f-string: the original
    # f"Album\Album-{month_str}\{date_str}" relied on invalid escape
    # sequences (\A, \{) and hard-coded the Windows separator.
    folder_name = os.path.join("Album", f"Album-{month_str}", date_str)
    os.makedirs(folder_name, exist_ok=True)

    # Skip albums that were already downloaded (description file exists).
    description_path = os.path.join(folder_name, "album-description.txt")
    if os.path.exists(description_path):
        continue

    created_time_utc = datetime.strptime(album['created'], "%Y-%m-%dT%H:%M:%S.%fZ")
    modified_time_utc = datetime.strptime(album['modified'], "%Y-%m-%dT%H:%M:%S.%fZ")

    # Add 9 hours to convert UTC to Seoul time (UTC+09:00).
    seoul_offset = timedelta(hours=9)
    created_time = created_time_utc + seoul_offset
    modified_time = modified_time_utc + seoul_offset

    # Save the album description; the with-block guarantees the handle is
    # closed before change_file_times reopens the file.
    with open(description_path, "w", encoding='utf-8') as desc_file:
        desc_file.write(f"Title: {album['title']}\n")
        desc_file.write(f"Content: {album['content']}\n")
        desc_file.write(f"json: {album}\n")
    # Back-date the description to the album's timestamps.
    change_file_times(description_path, created_time, modified_time)

    # Download the attached video, if any.
    if album['attached_video']:
        video_url = album['attached_video']
        video_response = requests.get(video_url)
        # Filename spelling ("Vidio") kept for continuity with files
        # produced by earlier runs.
        video_path = os.path.join(folder_name, f"KidsNote_Vidio_{album['id']}.MP4")
        with open(video_path, "wb") as video_file:
            video_file.write(video_response.content)
        change_file_times(video_path, created_time, modified_time)

    # Download attached images and back-date/stamp their metadata.
    for index, image in enumerate(album['attached_images'], start=1):
        image_url = image['original']
        image_response = requests.get(image_url)
        image_path = os.path.join(folder_name, f"KidsNote_Photo_{date_str}-{index}.jpg")
        with open(image_path, "wb") as image_file:
            image_file.write(image_response.content)
        change_file_times(image_path, created_time, modified_time)

        # Best-effort metadata stamping: a single bad image must not abort
        # the whole backup run.
        try:
            add_exif_data(image_path, album['title'], album['content'], modified_time)
            add_xmp_data(image_path, album['title'], album['content'], modified_time)
        except Exception as e:
            print(f"EXIF error for '{folder_name}': {e}")

    print(f"Album '{folder_name}' processed.")

print("All albums have been processed.")
|
||||
|
|
@ -1,12 +1,14 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import time
|
||||
import sys
|
||||
import re
|
||||
import requests
|
||||
from dotenv import load_dotenv
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.chrome.options import Options
|
||||
from selenium.webdriver.support.ui import WebDriverWait
|
||||
from selenium.webdriver.support import expected_conditions as EC
|
||||
from seleniumwire import webdriver
|
||||
|
||||
load_dotenv()
|
||||
|
|
@ -27,6 +29,13 @@ def extract_child_id_from_requests(requests) -> str | None:
|
|||
return match.group(1)
|
||||
return None
|
||||
|
||||
def extract_enrollment_from_requests(requests) -> str | None:
|
||||
"""Extract the X-ENROLLMENT header value from intercepted API requests."""
|
||||
for request in requests:
|
||||
if '/api/' in request.url and request.headers.get('X-ENROLLMENT'):
|
||||
return request.headers['X-ENROLLMENT']
|
||||
return None
|
||||
|
||||
# Use headless browser if you don't need to see it
|
||||
chrome_options = Options()
|
||||
chrome_options.add_argument("--headless=new")
|
||||
|
|
@ -35,24 +44,34 @@ chrome_options.add_argument("--no-sandbox")
|
|||
driver = webdriver.Chrome(options=chrome_options)
|
||||
driver.get(login_url)
|
||||
|
||||
# Wait for page to load
|
||||
time.sleep(2)
|
||||
# Wait for login form to be ready
|
||||
wait = WebDriverWait(driver, 30)
|
||||
wait.until(EC.element_to_be_clickable((By.NAME, "username")))
|
||||
|
||||
# Fill in login form
|
||||
driver.find_element(By.NAME, "username").send_keys(USERNAME)
|
||||
driver.find_element(By.NAME, "password").send_keys(PASSWORD)
|
||||
driver.find_element(By.CSS_SELECTOR, "button[type='submit']").click()
|
||||
|
||||
# Wait for login to process (adjust if needed)
|
||||
time.sleep(4)
|
||||
# Wait until the browser makes an authenticated API call (child reports endpoint)
|
||||
wait.until(lambda d: extract_child_id_from_requests(d.requests) is not None)
|
||||
|
||||
child_id = extract_child_id_from_requests(driver.requests)
|
||||
enrollment = extract_enrollment_from_requests(driver.requests)
|
||||
report_url = f"https://www.kidsnote.com/api/v1_2/children/{child_id}/reports/?page_size=5000"
|
||||
|
||||
# Extract cookies
|
||||
cookies = driver.get_cookies()
|
||||
driver.quit()
|
||||
|
||||
if not child_id:
|
||||
print("Error: could not determine child ID from intercepted requests.")
|
||||
sys.exit(1)
|
||||
|
||||
if not enrollment:
|
||||
print("Error: could not extract X-ENROLLMENT header from intercepted requests.")
|
||||
sys.exit(1)
|
||||
|
||||
with requests.Session() as session:
|
||||
# Convert cookies for requests
|
||||
for cookie in cookies:
|
||||
|
|
@ -72,7 +91,7 @@ with requests.Session() as session:
|
|||
"Sec-Fetch-Mode": "cors",
|
||||
"Sec-Fetch-Site": "same-origin",
|
||||
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:138.0) Gecko/20100101 Firefox/138.0",
|
||||
"X-ENROLLMENT": "16417613",
|
||||
"X-ENROLLMENT": enrollment,
|
||||
}
|
||||
|
||||
report_response = session.get(report_url, headers=headers)
|
||||
|
|
|
|||
|
|
@ -124,7 +124,7 @@ for date, reports in sorted(reports_by_date.items()):
|
|||
except requests.RequestException as e:
|
||||
print(f"Image download failed: {e}")
|
||||
|
||||
print(f"Report {report_index} (ID: {report['id']}) saved in '{folder}'")
|
||||
print(f"Report {report_index} (ID: {report['id']}) saved in '{folder}'")
|
||||
|
||||
print("All reports have been processed.")
|
||||
|
||||
|
|
|
|||
4
run.sh
4
run.sh
|
|
@ -1,5 +1,3 @@
|
|||
#!/usr/bin/bash
# Kidsnote backup entry point.
# NOTE(review): this span is rendered from a diff that interleaves removed
# and added lines — the two pipenv invocations appear to be replaced by the
# systemd unit start below; confirm against the actual run.sh.

pipenv run ./get_report.py
pipenv run ./report_json_down.py

systemctl --user start kidsnote
|
||||
|
|
|
|||
6
systemd/kidsnote-fail-notify.service
Normal file
6
systemd/kidsnote-fail-notify.service
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
[Unit]
Description=Notify Kidsnote failure

[Service]
# One-shot unit: fire the push-notification webhook once and exit.
Type=oneshot
ExecStart=/usr/bin/curl -fsS --retry 3 "https://example.net/api/push/xxxxxx?status=down&msg=Failed"
|
||||
|
|
@ -1,9 +1,12 @@
|
|||
[Unit]
Description=Download Kidsnote reports and JSON
Wants=kidsnote.timer
# OnFailure=kidsnote-fail-notify.service

[Service]
Type=oneshot
# NOTE(review): two WorkingDirectory lines are visible because this span is
# rendered from a diff (old path removed, new path added); systemd would
# honor only the last assignment. Confirm the first line is gone in the
# actual unit file.
WorkingDirectory=/home/jeena/Projects/kidsnote-backup
WorkingDirectory=/home/user/kidsnote-backup
# ExecStart lines run sequentially for Type=oneshot; report.json is removed
# after the download scripts finish.
ExecStart=/usr/bin/pipenv run ./get_report.py
ExecStart=/usr/bin/pipenv run ./report_json_down.py
ExecStart=/usr/bin/rm report.json
# ExecStartPost=/usr/bin/curl -fsS --retry 3 "https://example.com/api/push/xxxxxx?status=up&msg=OK&ping="
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue