Merge pull request 'photo_points' (#4) from w2/photovoter_backend:photo_points into photo_points

Reviewed-on: https://git.iamonlyherefortheicecream.ml/DIWHY/photovoter_backend/pulls/4
pull/6/head
g 4 years ago
commit 6a9d70f139

@ -17,7 +17,7 @@ con = sqlite3.connect(DB_LOCATION)
# Rows behave like dicts (column-name access) for the JSON responses below.
con.row_factory = sqlite3.Row
cur = con.cursor()  # NB! single is enough for now, we might require multiple later

# CORS: allow every origin for now — NOTE(review): tighten before production.
origins = [
    "*",
]
@ -145,36 +145,30 @@ async def rate_picture(cookie: str, picture_id: int, mark: int):
return {"status": "success"}
@app.get("/photo_points")
async def photo_points():
    """Return one map point per stored photo.

    Each point is a dict with the image id, resized-image url, GPS
    coordinates, and an ad-hoc like percentage aggregated from `marks`.
    """
    # assume we always have at least some photos
    # fetch them all
    # NOTE(review): `cur` is a module-level cursor shared by all requests and
    # this blocking call runs on the event loop (handler is async) — fine
    # single-threaded, confirm before adding concurrency.
    cur.execute(
        """SELECT images.imgid, resizedpath, GPSLatitude, GPSLongitude,
        100*SUM(marks.mark)/COUNT(marks.mark)/MAX(marks.mark) AS rate
        FROM images
        LEFT JOIN marks ON images.imgid = marks.imgid
        GROUP BY images.imgid;
        """,
        # 100*SUM(marks.mark)/COUNT(marks.mark)/MAX(marks.mark)
        # is an ad-hoc percentage of likes without knowing how the front end
        # defines like/dislike.
        # rate is None for a picture with no marks: SQLite yields NULL on
        # division by zero instead of raising.
    )
    points = cur.fetchall()
    return [
        {
            "id": point["imgid"],
            "url": point["resizedpath"],
            "lon": point["GPSLongitude"],
            "lat": point["GPSLatitude"],
            # the `AS rate` alias avoids keying the row by the raw SQL
            # expression string, which broke silently whenever the SELECT
            # text was reformatted
            "rate": point["rate"],
        }
        for point in points
    ]

@ -9,9 +9,11 @@ from shutil import move
import sqlite3

# update database residing here
DB_LOCATION = (
    "db/photovoter.dblite"  # Q: any allowances for this being not OUR database?
)
# place compressed images here (needs to exist)
DEST_SHRUNK = "db/image/"
# move originals here (needs to exist)
DEST_ORIGINAL = "db/original/"
@ -23,10 +25,10 @@ def usage():
def process_pictures():
"""Process images from the base directory in the first command line argument.
Place the resized copies to DEST_STRUNK and
Place the resized copies to DEST_SHRUNK and
move the originals to DEST_ORIGINAL.
Return a dict for each image processed for database collection.
Uses: DEST_STRUNK, DEST_ORIGINAL
Uses: DEST_SHRUNK, DEST_ORIGINAL
"""
# walk every pic
# We only care about files in the root of the path
@ -48,7 +50,7 @@ def process_pictures():
cloned.strip() # Q: may damage icc, do we allow that or use smh else?
cloned.transform(resize="50%") # Q: what do we want here?
# move them to the processed folder
cloned.save(filename=path.join(DEST_STRUNK, filename))
cloned.save(filename=path.join(DEST_SHRUNK, filename))
# move the originals out of the working directory
# Q: do we strip exif from originals?
@ -56,7 +58,7 @@ def process_pictures():
# return the freshly processed picture info
yield {
"ResizedImage": path.join(DEST_STRUNK, filename),
"ResizedImage": path.join(DEST_SHRUNK, filename),
"OriginalImage": path.join(DEST_ORIGINAL, filename),
"DateTimeOriginal": exif["DateTimeOriginal"], # Q: normalize it?
"GPSLatitude": exif["GPSLatitude"],

Loading…
Cancel
Save