feat: add libravatar API

May committed 2024-11-20 19:36:24 -08:00
parent c70db40ffa
commit aa67b08239
Signed by: split
GPG key ID: C325C61F0BF517C0
9 changed files with 304 additions and 81 deletions

View file

@@ -32,12 +32,12 @@ LIBRAVATAR__ENABLED=false
# Either jpeg or png. The type of image the libravatar API serves. Default png.
LIBRAVATAR__FORMAT=png
# What to do when libravatar requests a resolution that is not a valid output resolution. Default round.
# What to do when libravatar requests a resolution that is not a valid output resolution. Default nearest.
# Modes:
# round - Round to the nearest image size.
# nearest - Use the nearest image size.
# nocache - Render the image at the requested size without saving it to disk.
# cache - Render the image at the requested size and save it to disk.
LIBRAVATAR__GENERATION_MODE=round
LIBRAVATAR__GENERATION_MODE=nearest
# Prisma database URL
DATABASE_URL=file:../.data/data.db
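
Of the three modes, behaviour only differs when a request falls outside the configured output resolutions. A minimal sketch of that dispatch (the helper name, the persist flag, and outputResolutions are illustrative, not identifiers from this repo):

type GenerationMode = "nearest" | "nocache" | "cache"

// Sketch: decide which size to render and whether to keep the result on disk.
function resolveSize(
    requested: number,
    outputResolutions: number[],
    mode: GenerationMode
): { size: number; persist: boolean } {
    // sizes that are already valid output resolutions are always cached
    if (outputResolutions.includes(requested))
        return { size: requested, persist: true }
    if (mode === "nearest") {
        // snap to the closest configured resolution
        const nearest = outputResolutions
            .slice()
            .sort((a, b) => Math.abs(requested - a) - Math.abs(requested - b))[0]
        return { size: nearest, persist: true }
    }
    // "cache" renders at the requested size and saves it; "nocache" renders it
    // per request without touching the disk
    return { size: requested, persist: mode === "cache" }
}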

View file

@@ -0,0 +1,6 @@
-- CreateTable
CREATE TABLE "EmailHashes" (
"forUserId" TEXT NOT NULL PRIMARY KEY,
"sha256" BLOB NOT NULL,
CONSTRAINT "EmailHashes_forUserId_fkey" FOREIGN KEY ("forUserId") REFERENCES "User" ("userId") ON DELETE RESTRICT ON UPDATE CASCADE
);

View file

@@ -0,0 +1,20 @@
/*
Warnings:
- Added the required column `md5` to the `EmailHashes` table without a default value. This is not possible if the table is not empty.
*/
-- RedefineTables
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_EmailHashes" (
"forUserId" TEXT NOT NULL PRIMARY KEY,
"sha256" BLOB NOT NULL,
"md5" BLOB NOT NULL,
CONSTRAINT "EmailHashes_forUserId_fkey" FOREIGN KEY ("forUserId") REFERENCES "User" ("userId") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_EmailHashes" ("forUserId", "sha256") SELECT "forUserId", "sha256" FROM "EmailHashes";
DROP TABLE "EmailHashes";
ALTER TABLE "new_EmailHashes" RENAME TO "EmailHashes";
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;
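
The two BLOB columns hold raw digests of the user's email address. A sketch of how they are produced with node:crypto, mirroring the hashing added to getUserInfo() in this commit:

import crypto from "node:crypto"

// Returns the digests stored in EmailHashes for a given address.
function emailHashesFor(email: string) {
    return {
        sha256: crypto.createHash("sha256").update(email).digest(),
        md5: crypto.createHash("md5").update(email).digest(),
    }
}

// Libravatar clients conventionally hash the trimmed, lowercased address, so a
// caller may want to normalize first: emailHashesFor(email.trim().toLowerCase())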

View file

@@ -24,6 +24,7 @@ model User {
name String?
avatars Avatar[]
webhooks Webhook[]
emailHashes EmailHashes?
currentAvatarId String? @unique
currentAvatar Avatar? @relation("CurrentAvatar", fields: [currentAvatarId], references: [id])
@@ -48,3 +49,10 @@ model Webhook {
@@unique([url, userId])
}
model EmailHashes {
forUserId String @id
user User @relation(fields: [forUserId], references: [userId])
sha256 Bytes
md5 Bytes
}
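
With the one-to-one relation in place, a libravatar hash resolves to a user's current avatar in a single query. A minimal sketch, mirroring the lookup used by the new avatar route:

import { PrismaClient } from "@prisma/client"

const prisma = new PrismaClient()

// Map a hex-encoded hash (md5 or sha256) to the owner's current avatar id.
async function avatarIdForHash(hashHex: string) {
    const hashBinary = Buffer.from(hashHex, "hex")
    const row = await prisma.emailHashes.findFirst({
        where: { OR: [{ sha256: hashBinary }, { md5: hashBinary }] },
        select: { user: { select: { currentAvatarId: true } } },
    })
    return row?.user.currentAvatarId ?? undefined
}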

View file

@@ -15,6 +15,16 @@ await mkdir(defaultAvatarDirectory, { recursive: true })
export const missingAvatarQueue = new Map<string, Promise<string>>()
export async function getAvailableSizesInPath(path: string) {
return (await readdir(path)).map(
e =>
[parseInt(e.match(/(.*)\..*/)?.[1] || "", 10), e] as [
number,
string,
]
)
}
/**
* @description Generate an avatar at the selected size and format
* @param path Path to the avatar directory
@@ -32,25 +42,14 @@ export function generateMissingAvatar(
let prom = new Promise<string>(async (res, rej) => {
// locate best quality currently available
const av = await readdir(path)
// this can probably be done better but I DON'T GIVE A FUCK !!!!
const pathToBestQualityImg =
path == defaultAvatarDirectory
? "./assets/default.png"
: join(
path,
av
.map(
e =>
[
parseInt(
e.match(/(.*)\..*/)?.[1] || "",
10
),
e,
] as [number, string]
)
.sort(([a], [b]) => b - a)[0][1]
(await getAvailableSizesInPath(path)).sort(
([a], [b]) => b - a
)[0][1]
)
const buf = await readFile(pathToBestQualityImg)
@@ -72,7 +71,8 @@ export function generateMissingAvatar(
export async function getPathToAvatar(
avatarId?: string,
size: number = configuration.images.default_resolution,
fmt?: string
fmt?: string,
bypass_size_limits: boolean = false
) {
if (avatarId?.includes("/")) throw Error("AvatarID cannot include /")
@@ -112,7 +112,10 @@ export async function getPathToAvatar(
.find(s => parseInt(s.name.match(/(.*)\..*/)?.[1] || "", 10) == size)
if (targetAvatar) return join(targetAvatarDirectory, targetAvatar.name)
else if (configuration.images.output_resolutions.includes(size))
else if (
configuration.images.output_resolutions.includes(size) ||
bypass_size_limits
)
return makeMissing() // generate image at this size for the specified format
}
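
getAvailableSizesInPath, extracted above, returns [size, filename] pairs parsed from the cached files, and the new bypass_size_limits parameter lets a caller ask for a size outside images.output_resolutions, which the libravatar route does in "cache" mode. A usage sketch (the avatar id and size are placeholders):

// Render (and cache on disk) a 100px image even though 100 is not a configured
// output resolution; with bypass_size_limits left false this call would resolve
// to undefined instead.
const path = await getPathToAvatar("someAvatarId", 100, "jpeg", true)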

View file

@@ -59,11 +59,11 @@ const configuration = {
)
? env.LIBRAVATAR__FORMAT
: "jpeg") as keyof FormatEnum,
resize_mode: ["round", "nocache", "cache"].includes(
resize_mode: (["nearest", "nocache", "cache"].includes(
env.LIBRAVATAR__GENERATION_MODE
)
? env.LIBRAVATAR__GENERATION_MODE
: "round",
: "nearest") as "nearest" | "nocache" | "cache",
},
}
: {}),
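
Note that because the mode was renamed, the whitelist above no longer recognizes the old value; a stale setting quietly falls back to the new default, which preserves the old behaviour. Sketched fallback:

const raw = process.env.LIBRAVATAR__GENERATION_MODE ?? ""
const mode = (["nearest", "nocache", "cache"].includes(raw)
    ? raw
    : "nearest") as "nearest" | "nocache" | "cache"
// An existing .env still containing "round" lands on "nearest", which matches
// what "round" previously did.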

View file

@@ -2,9 +2,14 @@ import { error, redirect, type Cookies } from "@sveltejs/kit"
import configuration from "./configuration"
import type { User } from "./types"
import { prisma } from "./clientsingleton"
import type { EmailHashes } from "@prisma/client"
import crypto from "node:crypto"
// Map of OAuth2 states
const states = new Map<string, { redirect_uri: string, timeout: ReturnType<typeof setTimeout> }>()
const states = new Map<
string,
{ redirect_uri: string; timeout: ReturnType<typeof setTimeout> }
>()
// Cache of userinfo
const userInfoCache = new Map<string, User>()
@@ -22,7 +27,7 @@ export function launchLogin(url: string) {
client_id: configuration.oauth2.client.id,
redirect_uri: url,
scope: configuration.oauth2.client.scopes,
state
state,
})
// Did not think this would work lmao
const target = new URL(
@@ -33,17 +38,10 @@ export function launchLogin(url: string) {
// cache state
// NO IDEA IF THIS WORKS IN SERVERLESS LOL
// not like this is going to be running serverless anyway
states
.set(
state,
{
timeout: setTimeout(
() => states.delete(state),
2*60*1000
),
redirect_uri: url
}
)
states.set(state, {
timeout: setTimeout(() => states.delete(state), 2 * 60 * 1000),
redirect_uri: url,
})
throw redirect(302, target.toString())
}
@@ -55,38 +53,46 @@ export function launchLogin(url: string) {
*/
export async function getNewToken(
params:
{grant_type: "authorization_code", redirect_uri: string, code: string}
| {grant_type: "refresh_token", refresh_token: string}
| {
grant_type: "authorization_code"
redirect_uri: string
code: string
}
| { grant_type: "refresh_token"; refresh_token: string }
) {
// Generate a query string for the request
const searchParams = new URLSearchParams({
...params,
client_id: configuration.oauth2.client.id,
client_secret: configuration.oauth2.client.secret
client_secret: configuration.oauth2.client.secret,
})
// send request to retrieve tokens
let res = await fetch(configuration.oauth2.endpoints.token, {
method: "POST",
body: searchParams // this standard sucks, actually
body: searchParams, // this standard sucks, actually
})
if (res.ok)
return (await res.json()) as { access_token: string, expires_in: number, refresh_token?: string }
return (await res.json()) as {
access_token: string
expires_in: number
refresh_token?: string
}
}
export function fetchUserInfo(token: string) {
// try fetching new userinfo
return fetch(configuration.userinfo.route, {
headers: {
"Authorization": `Bearer ${token}`
}
Authorization: `Bearer ${token}`,
},
})
}
export async function getUserInfo(id: string) {
// fetch token information
const tokenInfo = await prisma.token.findUnique({
where: { id }
where: { id },
})
if (!tokenInfo) return
@@ -103,15 +109,15 @@ export async function getUserInfo(id: string) {
if (!tokenInfo.refreshToken) return // no refresh token. back out
let token = await getNewToken({
grant_type: "refresh_token",
refresh_token: tokenInfo.refreshToken
refresh_token: tokenInfo.refreshToken,
})
if (!token) return // refresh failed. back out
await prisma.token.update({
where: { id },
data: {
token: token.access_token,
refreshToken: token.refresh_token
}
refreshToken: token.refresh_token,
},
})
userInfoRequest = await fetchUserInfo(token.access_token)
@@ -120,6 +126,20 @@ export async function getUserInfo(id: string) {
userInfo = await userInfoRequest.json()
// get emailHashes
let emailHashes: Omit<EmailHashes, "forUserId"> | undefined = undefined
if (userInfo.email) {
emailHashes = {
sha256: crypto
.createHash("sha256")
.update(userInfo.email)
.digest(),
md5: crypto.createHash("md5").update(userInfo.email).digest(),
}
}
// update user
await prisma.user.upsert({
where: {
@@ -127,37 +147,58 @@ export async function getUserInfo(id: string) {
},
update: {
identifier: userInfo[configuration.userinfo.identifier],
name: userInfo.name
name: userInfo.name,
...(emailHashes
? {
emailHashes: {
upsert: {
create: emailHashes,
update: emailHashes,
},
},
}
: {}),
},
create: {
userId: userInfo.sub,
identifier: userInfo[configuration.userinfo.identifier],
name: userInfo.name
name: userInfo.name,
...(emailHashes
? {
emailHashes: {
create: emailHashes,
},
}
: {}),
},
})
// cache userinfo
userInfoCache.set(tokenInfo.owner, userInfo)
setTimeout(() => userInfoCache.delete(tokenInfo.owner), 15*60*1000)
setTimeout(() => userInfoCache.delete(tokenInfo.owner), 15 * 60 * 1000)
}
return { ...userInfo, identifier: userInfo[configuration.userinfo.identifier] } as User
return {
...userInfo,
identifier: userInfo[configuration.userinfo.identifier],
} as User
}
export function deleteToken(id: string) {
prisma.token.delete({
where: {id}
where: { id },
})
}
export async function getRequestUser(request: Request, cookies: Cookies) {
const params = new URLSearchParams(request.url.split("?").slice(1).join("?"))
const params = new URLSearchParams(
request.url.split("?").slice(1).join("?")
)
let token = cookies.get("token")
// log user in
if (!token && params.has("code") && params.has("state")) {
// check if state is real
if (!states.has(params.get("state")!))
throw error(401, "bad state")
if (!states.has(params.get("state")!)) throw error(401, "bad state")
// get state
let state = states.get(params.get("state")!)!
@@ -168,23 +209,28 @@ export async function getRequestUser(request: Request, cookies: Cookies) {
let tokens = await getNewToken({
grant_type: "authorization_code",
redirect_uri: state.redirect_uri,
code: params.get("code")!
code: params.get("code")!,
})
if (!tokens)
throw error(401, "Couldn't get initial token, code may be incorrect")
throw error(
401,
"Couldn't get initial token, code may be incorrect"
)
// fetch userdata
// could cache this, but lazy
let userInfo = await (await fetchUserInfo(tokens.access_token)).json() as User
let userInfo = (await (
await fetchUserInfo(tokens.access_token)
).json()) as User
// create a new token
let newToken = await prisma.token.create({
data: {
token: tokens.access_token,
refreshToken: tokens.refresh_token,
owner: userInfo.sub
}
owner: userInfo.sub,
},
})
token = newToken.id

View file

@@ -2,4 +2,5 @@ export interface User {
name: string
sub: string
identifier: string
email?: string
}

View file

@@ -0,0 +1,139 @@
import {
avatarDirectory,
getAvailableSizesInPath,
getPathToAvatar,
getPathToAvatarForIdentifier,
renderAvatar,
} from "$lib/avatars.js"
import { prisma } from "$lib/clientsingleton.js"
import configuration from "$lib/configuration.js"
import { getRequestUser } from "$lib/oidc.js"
import { error, redirect } from "@sveltejs/kit"
import { readFile } from "fs/promises"
import mime from "mime"
import { join } from "path"
export async function GET({ params: { hash }, url }) {
if (!configuration.libravatar)
throw error(501, "The libravatar API is disabled on this server")
const requestedSize = parseInt(url.searchParams.get("s") || "0", 10) || 80
const fallback = url.searchParams.get("d") || "mm"
const forceDefault = url.searchParams.get("f") === "y"
const hashBinary = Buffer.from(hash, "hex")
let size = requestedSize > 512 || requestedSize < 1 ? 80 : requestedSize
// try to find the user from the hashBinary
const avatarId = (
await prisma.emailHashes.findFirst({
where: {
OR: [
{
sha256: hashBinary,
},
{
md5: hashBinary,
},
],
},
select: {
user: {
select: {
currentAvatarId: true,
},
},
},
})
)?.user.currentAvatarId
let avPath: string | undefined
if (!forceDefault && avatarId)
switch (configuration.libravatar.resize_mode) {
case "nearest":
// find nearest size available
size = configuration.images.output_resolutions.includes(size)
? size
: configuration.images.output_resolutions
.slice()
.sort(
(a, b) => Math.abs(size - a) - Math.abs(size - b)
)[0]
// don't break here so it goes into the cache case
case "cache":
// get path to avatar
avPath = await getPathToAvatar(
avatarId,
size,
configuration.libravatar.output_format,
// bypass size limits if cache
// nearest shouldn't trigger this anyway but just in case
configuration.libravatar.resize_mode == "cache"
)
break
case "nocache":
const avatarPath = join(avatarDirectory, avatarId),
avImgs = await getAvailableSizesInPath(avatarPath),
avImageSizes = avImgs.map(e => e[0])
if (!avImageSizes.includes(size)) {
// we need to scale down
// find the next largest image resolution
let sortedSizes = [...avImageSizes, size].sort(
(a, b) => a - b
)
// try to get higher res if exists, otherwise get lower
let scaleDownFrom = join(
avatarPath,
avImgs[
avImageSizes.indexOf(
sortedSizes[sortedSizes.indexOf(size) + 1] ||
sortedSizes[sortedSizes.indexOf(size) - 1]
)
][1]
)
// render an avatar
let avatar = await renderAvatar(
await readFile(scaleDownFrom),
size,
configuration.libravatar.output_format
)
// serve image
return new Response(await avatar.img.toBuffer(), {
headers: {
"Content-Type":
mime.getType(avatar.extension!) || "",
},
})
} else {
// we don't need to scale down. serve this image
avPath = join(
avatarPath,
avImgs[avImageSizes.indexOf(size)][1]
)
}
}
if (!avPath) {
switch (fallback) {
case "404":
throw error(404, "Avatar not found")
case "mm":
case "mp":
// TODO: serve a default image at the correct size
throw redirect(302, "/avatars/default/image")
default:
throw redirect(302, fallback)
}
}
return new Response(await readFile(avPath), {
headers: {
"Content-Type": mime.getType(avPath) || "",
},
})
}
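
A client-side sketch of the new endpoint, assuming it is mounted at the conventional /avatar/<hash> path (the origin is a placeholder). The server stores digests of the address exactly as reported by the identity provider, and any s value outside 1-512 falls back to 80:

import crypto from "node:crypto"

const email = "user@example.com"
const hash = crypto.createHash("sha256").update(email).digest("hex")

// s: requested size, d: fallback ("404", "mm"/"mp" for the default image, or a
// redirect URL), f=y: force the fallback even when the hash matches a user
const res = await fetch(`https://example.com/avatar/${hash}?s=256&d=404`)
if (res.ok) {
    const image = Buffer.from(await res.arrayBuffer())
    // image now holds the avatar in the server's configured libravatar format
}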