from fastapi import FastAPI, File, UploadFile, Depends, BackgroundTasks
from fastapi.responses import JSONResponse, Response
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from fastapi.middleware.cors import CORSMiddleware  # CORS
from secrets import compare_digest
from datetime import datetime
from uuid import uuid4
import sqlite3
import zipfile

# Global settings of this program
# ./config.py
from config import (
    DB_LOCATION,
    DATA_LOCATION,
    DEST_SHRUNK,
    DEST_ORIGINAL,
    CRED_USERNAME,
    CRED_PASSWORD,
)

# our own util for photo upload and processing
from util import import_photos as iph

# Initialization logic
app = FastAPI()
security = HTTPBasic()

iph.check_database(database_path=DB_LOCATION)
# NOTE(review): one module-level connection/cursor is shared by all endpoints.
# The async handlers run on a single event-loop thread so access is serialized
# in practice, but this is worth revisiting if any handler becomes threaded.
con = sqlite3.connect(DB_LOCATION)
con.row_factory = sqlite3.Row
cur = con.cursor()
# NB! single is enough for now, we might require multiple later
origins = [  # CORS
    "*",
]

app.add_middleware(  # CORS
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.get("/new_session", responses={503: {"description": "Unable to initiate session"}})
async def new_session():
    """Start a new session.

    Generates a random cookie, stores it with a UTC timestamp in the
    ``sessions`` table, and returns it.  Retries on the (astronomically
    unlikely) event of a cookie collision.

    Returns:
        ``{"cookie": <hex string>}`` on success, or a 503 response if three
        consecutive collisions occur.
    """
    # add session to the database
    time = datetime.utcnow().replace(microsecond=0).isoformat()
    tries = 3  # something is very wrong with our random, if we miss 3 times
    for i in range(tries):
        try:
            # generate a cookie
            cookie = uuid4().hex
            cur.execute(
                """INSERT INTO sessions(cookie, time)
                        VALUES(:cookie, :time)
                """,
                {"cookie": cookie, "time": time},
            )
            con.commit()
        except sqlite3.IntegrityError as e:
            collision = str(e) == "UNIQUE constraint failed: sessions.cookie"
            if collision and i < tries - 1:
                continue  # retry with a fresh cookie
            elif collision:
                return JSONResponse(status_code=503)
            else:
                raise  # some other integrity problem: do not mask it
        break
    # return new session cookie
    return {"cookie": cookie}


@app.get(
    "/next_picture/{cookie}",
    responses={
        204: {"description": "All available images have been appraised"},
        409: {"description": "Uninitiated session"},
    },
)
async def next_picture(cookie: str):
    """Request a new picture to rate.

    Returns:
        ``{"picture_id", "picture_uri"}`` for one image this session has not
        yet marked, 204 when every image has been appraised, or 409 for an
        unknown cookie.
    """
    # check if the cookie is valid
    cur.execute(
        """SELECT sessionid FROM sessions WHERE cookie = :cookie LIMIT 1""",
        {"cookie": cookie},
    )
    sessionid = cur.fetchone()
    if sessionid is None:
        return JSONResponse(status_code=409)
    # take not rated picture from the database
    # do not insert anything in the database yet
    # return this picture
    # SELECT all images EXCEPT images with marks from the current session ->
    # -> SELECT paths for these images
    # FIXME[0]: can this be done better?
    cur.execute(
        """SELECT imgid, resizedpath FROM images
            WHERE imgid IN
            (SELECT imgid FROM images
             EXCEPT
             SELECT imgid FROM marks WHERE sessionid = :sessionid)
            LIMIT 1
        """,
        {"sessionid": sessionid["sessionid"]},
    )
    r = cur.fetchone()
    if r is not None:
        return {"picture_id": r["imgid"], "picture_uri": r["resizedpath"]}
    else:
        # All available pics have been voted for by this sessionid.
        # A 204 response must carry no body, so use a bare Response
        # (JSONResponse would render a "null" payload, which Starlette rejects)
        return Response(status_code=204)


@app.get(
    "/rate_picture/{cookie}/{picture_id}/{mark}",
    responses={
        406: {"description": "Already appraised"},
        409: {"description": "Uninitiated session"},
    },
)
async def rate_picture(cookie: str, picture_id: int, mark: int):
    """Submit a rating for the picture.

    Inserts one row into ``marks``; the (imgid, sessionid) UNIQUE constraint
    guards against double voting.  Returns 406 when this session already
    rated the picture, 409 for an unknown cookie.
    """
    # check if the cookie is valid
    cur.execute(
        """SELECT sessionid FROM sessions WHERE cookie = :cookie LIMIT 1""",
        {"cookie": cookie},
    )
    sessionid = cur.fetchone()
    if sessionid is None:
        return JSONResponse(status_code=409)
    # add new mark to the session table
    try:
        cur.execute(
            """INSERT INTO marks(imgid, sessionid, mark)
                    VALUES(:imgid,:sessionid,:mark)
            """,
            {"imgid": picture_id, "sessionid": sessionid["sessionid"], "mark": mark},
        )
        con.commit()
    except sqlite3.IntegrityError as e:
        if str(e) == "UNIQUE constraint failed: marks.imgid, marks.sessionid":
            return JSONResponse(status_code=406)
    return {"status": "success"}


@app.get("/photo_points")
async def photo_points():
    """Get points with the url of a photo and the rate.

    Returns one entry per image with its coordinates and an ad-hoc
    like-percentage aggregated over all sessions' marks.
    """
    # assume we always have at least some photos
    # fetch them all
    cur.execute(
        """SELECT images.imgid, resizedpath, GPSLatitude, GPSLongitude,
               100*SUM(marks.mark)/COUNT(marks.mark)/MAX(marks.mark) AS rate
           FROM images LEFT JOIN marks ON images.imgid = marks.imgid
           GROUP BY images.imgid;
        """,
        # 100 * SUM(marks.mark)/COUNT(marks.mark)/MAX(marks.mark)
        # is an ad-hoc percentage of likes without know how front end defined like/dislike
        # returns None with no marks (sqlite handles division by 0 gracefully)
        # aliased AS rate so the row key is stable instead of the raw expression
    )
    points = cur.fetchall()
    return [
        {
            "id": point["imgid"],
            "url": point["resizedpath"],
            "lon": point["GPSLongitude"],
            "lat": point["GPSLatitude"],
            "rate": point["rate"],
        }
        for point in points
    ]


@app.post(
    "/upload_pictures",
    responses={
        202: {"description": "Archive accepted into processing"},
        401: {"description": "Authentication is required to access this resource"},
        415: {"description": "Cannot process uploaded archive"},
    },
)
async def upload_pictures(
    background_tasks: BackgroundTasks,
    credentials: HTTPBasicCredentials = Depends(security),
    file: UploadFile = File(...),
):
    """Photo upload endpoint.

    Accepts photos in zip archives with any internal directory structure.
    Valid uploads yield a 202 status message and process photos in the
    background; non-zip uploads yield a 415 error.  Upload is restricted by
    basic HTTP login, configurable in config.py.
    """
    # check authenticity; compare_digest keeps the comparison constant-time
    correct_username = compare_digest(credentials.username, CRED_USERNAME)
    correct_password = compare_digest(credentials.password, CRED_PASSWORD)
    if not (correct_username and correct_password):
        return JSONResponse(status_code=401)
    # slurp the zip
    if not zipfile.is_zipfile(file.file):
        return JSONResponse(status_code=415)
    # detach from the interface: unpack the zip after the response is sent,
    # via the BackgroundTasks instance injected by FastAPI
    background_tasks.add_task(
        unpack_pictures_zip,
        file=file,
        time=datetime.utcnow().replace(microsecond=0).isoformat(),
    )
    # feed the pictures to util/import_photos.py
    # 202 matches the documented response for an accepted archive
    return JSONResponse("Accepted", status_code=202, background=background_tasks)


def unpack_pictures_zip(file: UploadFile, time):
    """Unpack and process a zip-archived photo upload.

    Extracts pictures into DATA_LOCATION/processing<time> and feeds each
    directory of the extracted tree to util/import_photos.py, then removes
    the scratch directory.

    Uses: DB_LOCATION, DATA_LOCATION.

    NOTE(review): this runs as a background task after the response; it
    presumably relies on the upload's spooled temp file still being open at
    that point — confirm against FastAPI's UploadFile cleanup behavior.
    """
    # we only call this function sporadically, so import here
    import os
    from shutil import rmtree

    print(f"Accepted {file.filename} at {time} into processing")
    processing_path = os.path.join(DATA_LOCATION, "processing" + time)
    os.makedirs(processing_path, exist_ok=True)
    # using private ._file field is a dirty hack, but
    # SpooledTemporaryFile does not implement seekable
    # required by zipfile 'r' mode
    # https://bugs.python.org/issue26175
    with zipfile.ZipFile(file.file._file) as photo_zip:
        # testzip() returns the name of the first corrupt member, or None;
        # we log it but still extract whatever we can
        first_bad_member = photo_zip.testzip()
        if first_bad_member is not None:
            print(f"Errors in {file.filename} from {time} at {first_bad_member}")
        photo_zip.extractall(path=processing_path)
        # no explicit close(): the with-statement closes the archive
    print(f"Start processing {file.filename} from {time}")
    iph.check_database(database_path=DB_LOCATION)
    # walk the nested extraction tree ourselves, one import run per directory
    for dirpath, _, _ in os.walk(processing_path):
        iph.run(
            db_location=DB_LOCATION,
            source=dirpath,
            dest_shrunk=os.path.join(DATA_LOCATION, DEST_SHRUNK),
            dest_original=os.path.join(DATA_LOCATION, DEST_ORIGINAL),
        )
    rmtree(processing_path)
    print(f"Successfully processed {file.filename} from {time}")