From 2b70cea4e05b7fbc1238928cdb89dd1617ccdf9a Mon Sep 17 00:00:00 2001 From: Husky <39809509+Huskydog9988@users.noreply.github.com> Date: Tue, 8 Jul 2025 22:01:23 -0400 Subject: [PATCH] Logging (#131) * ci: pull version from package.json on build * fix: implicit any type * feat: inital support for logger * style: fix lint * feat: move more logging over to pino * fix: logging around company importing --- components/GameEditor/Metadata.vue | 2 +- components/GamePanel.vue | 4 +- components/NewsArticleCreateButton.vue | 2 +- components/OptionWrapper.vue | 8 +- components/UploadFileDialog.vue | 2 +- composables/task.ts | 5 +- nuxt.config.ts | 65 ++++++-- package.json | 4 +- plugins/error-handler.ts | 4 +- server/api/v1/auth/signin/simple.post.ts | 3 +- server/api/v1/notifications/ws.get.ts | 3 +- server/internal/acls/index.ts | 3 - server/internal/auth/index.ts | 7 +- server/internal/auth/oidc/index.ts | 3 +- server/internal/library/index.ts | 8 +- server/internal/logging/index.ts | 12 ++ server/internal/metadata/giantbomb.ts | 28 ++-- server/internal/metadata/igdb.ts | 35 +++-- server/internal/metadata/index.ts | 16 +- server/internal/metadata/manual.ts | 4 +- server/internal/metadata/pcgamingwiki.ts | 38 +++-- server/internal/objects/fsBackend.ts | 16 +- server/internal/objects/objectHandler.ts | 7 +- server/internal/objects/transactional.ts | 4 +- server/internal/tasks/index.ts | 145 +++++++++++++----- server/internal/tasks/registry/invitations.ts | 7 +- server/internal/tasks/registry/objects.ts | 24 +-- server/internal/tasks/registry/sessions.ts | 7 +- server/internal/tasks/registry/update.ts | 36 +++-- server/plugins/03.metadata-init.ts | 7 +- server/plugins/05.library-init.ts | 5 +- utils/parseTaskLog.ts | 4 +- yarn.lock | 134 +++++++++++++++- 33 files changed, 481 insertions(+), 171 deletions(-) create mode 100644 server/internal/logging/index.ts diff --git a/components/GameEditor/Metadata.vue b/components/GameEditor/Metadata.vue index c69833e..1b8a10d 100644 --- 
a/components/GameEditor/Metadata.vue +++ b/components/GameEditor/Metadata.vue @@ -376,7 +376,7 @@ accept="image/*" class="hidden" type="file" - @change="(e) => coreMetadataUploadFiles(e as any)" + @change="(e: Event) => coreMetadataUploadFiles(e as any)" /> diff --git a/components/GamePanel.vue b/components/GamePanel.vue index 6d001ff..d7fd4d8 100644 --- a/components/GamePanel.vue +++ b/components/GamePanel.vue @@ -32,7 +32,9 @@ :class="{ 'group-hover:text-white transition-colors': animate }" class="text-zinc-100 text-sm font-bold font-display" > - {{ game ? game.mName : $t("settings.admin.store.dropGameNamePlaceholder") }} + {{ + game ? game.mName : $t("settings.admin.store.dropGameNamePlaceholder") + }}
- +
@@ -16,7 +16,7 @@ diff --git a/components/UploadFileDialog.vue b/components/UploadFileDialog.vue index 721d8a4..1a4d95c 100644 --- a/components/UploadFileDialog.vue +++ b/components/UploadFileDialog.vue @@ -67,7 +67,7 @@ class="hidden" type="file" :multiple="props.multiple" - @change="(e) => (file = (e.target as any)?.files)" + @change="(e: Event) => (file = (e.target as any)?.files)" /> diff --git a/composables/task.ts b/composables/task.ts index e4405be..bb383b0 100644 --- a/composables/task.ts +++ b/composables/task.ts @@ -1,5 +1,6 @@ import type { TaskMessage } from "~/server/internal/tasks"; import { WebSocketHandler } from "./ws"; +import { logger } from "~/server/internal/logging"; const websocketHandler = new WebSocketHandler("/api/v1/task"); // const taskStates: { [key: string]: } = {}; @@ -38,7 +39,7 @@ websocketHandler.listen((message) => { case "disconnect": { const disconnectTaskId = data[0]; taskStates.delete(disconnectTaskId); - console.log(`disconnected from ${disconnectTaskId}`); + logger.info(`disconnected from ${disconnectTaskId}`); break; } case "error": { @@ -71,7 +72,7 @@ export const useTask = (taskId: string): Ref => { if (task && task.value && !task.value.error) return task; taskStates.set(taskId, ref(undefined)); - console.log("connecting to " + taskId); + logger.info("connecting to " + taskId); websocketHandler.send(`connect/${taskId}`); // TODO: this may have changed behavior return taskStates.get(taskId) ?? 
ref(undefined); diff --git a/nuxt.config.ts b/nuxt.config.ts index f872521..8b09e3a 100644 --- a/nuxt.config.ts +++ b/nuxt.config.ts @@ -1,20 +1,15 @@ import tailwindcss from "@tailwindcss/vite"; import { execSync } from "node:child_process"; -import { cpSync } from "node:fs"; +import { cpSync, readFileSync, existsSync } from "node:fs"; import path from "node:path"; import module from "module"; import { viteStaticCopy } from "vite-plugin-static-copy"; +import { type } from "arktype"; -// get drop version -const dropVersion = process.env.BUILD_DROP_VERSION ?? "v0.3.0-alpha.1"; -// example nightly: "v0.3.0-nightly.2025.05.28" - -// get git ref or supply during build -const commitHash = - process.env.BUILD_GIT_REF ?? - execSync("git rev-parse --short HEAD").toString().trim(); - -console.log(`Building Drop ${dropVersion} #${commitHash}`); +const packageJsonSchema = type({ + name: "string", + version: "string", +}); const twemojiJson = module.findPackageJSON( "@discordapp/twemoji", @@ -24,6 +19,16 @@ if (!twemojiJson) { throw new Error("Could not find @discordapp/twemoji package."); } +// get drop version +const dropVersion = getDropVersion(); + +// get git ref or supply during build +const commitHash = + process.env.BUILD_GIT_REF ?? 
+ execSync("git rev-parse --short HEAD").toString().trim(); + +console.log(`Drop ${dropVersion} #${commitHash}`); + // https://nuxt.com/docs/api/configuration/nuxt-config export default defineNuxtConfig({ extends: ["./drop-base"], @@ -257,3 +262,41 @@ export default defineNuxtConfig({ requestSizeLimiter: false, }, }); + +/** + * Gets the drop version from the environment variable or package.json + * @returns {string} The drop version + */ +function getDropVersion(): string { + // get drop version from environment variable + if (process.env.BUILD_DROP_VERSION) { + return process.env.BUILD_DROP_VERSION; + } + // example nightly: "v0.3.0-nightly.2025.05.28" + const defaultVersion = "v0.0.0-alpha.0"; + + // get path + const packageJsonPath = path.join( + path.dirname(import.meta.url.replace("file://", "")), + "package.json", + ); + console.log(`Reading package.json from ${packageJsonPath}`); + if (!existsSync(packageJsonPath)) { + console.error("Could not find package.json, using default version."); + return defaultVersion; + } + + // parse package.json + const raw = JSON.parse(readFileSync(packageJsonPath, "utf-8")); + const packageJson = packageJsonSchema(raw); + if (packageJson instanceof type.errors) { + console.error("Failed to parse package.json", packageJson.summary); + return defaultVersion; + } + + // ensure version starts with 'v' + if (packageJson.version.startsWith("v")) { + return packageJson.version; + } + return `v${packageJson.version}`; +} diff --git a/package.json b/package.json index bcffcac..fb245d3 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "drop", - "version": "0.3.0", + "version": "0.3.0-alpha.0", "private": true, "type": "module", "license": "AGPL-3.0-or-later", @@ -41,6 +41,8 @@ "normalize-url": "^8.0.2", "nuxt": "^3.17.4", "nuxt-security": "2.2.0", + "pino": "^9.7.0", + "pino-pretty": "^13.0.0", "prisma": "^6.7.0", "sanitize-filename": "^1.6.3", "semver": "^7.7.1", diff --git a/plugins/error-handler.ts 
b/plugins/error-handler.ts index 4337928..2122972 100644 --- a/plugins/error-handler.ts +++ b/plugins/error-handler.ts @@ -1,5 +1,7 @@ +import { logger } from "~/server/internal/logging"; + export default defineNuxtPlugin((nuxtApp) => { nuxtApp.hook("vue:error", (error, instance, info) => { - console.error(error, instance, info); + logger.error(info, error, instance); }); }); diff --git a/server/api/v1/auth/signin/simple.post.ts b/server/api/v1/auth/signin/simple.post.ts index 4e0c6ba..9c49a38 100644 --- a/server/api/v1/auth/signin/simple.post.ts +++ b/server/api/v1/auth/signin/simple.post.ts @@ -7,6 +7,7 @@ import authManager, { checkHashArgon2, checkHashBcrypt, } from "~/server/internal/auth"; +import { logger } from "~/server/internal/logging"; const signinValidator = type({ username: "string", @@ -28,7 +29,7 @@ export default defineEventHandler<{ const body = signinValidator(await readBody(h3)); if (body instanceof type.errors) { // hover out.summary to see validation errors - console.error(body.summary); + logger.error(body.summary); throw createError({ statusCode: 400, diff --git a/server/api/v1/notifications/ws.get.ts b/server/api/v1/notifications/ws.get.ts index e629702..2f86d7c 100644 --- a/server/api/v1/notifications/ws.get.ts +++ b/server/api/v1/notifications/ws.get.ts @@ -1,5 +1,6 @@ import notificationSystem from "~/server/internal/notifications"; import aclManager from "~/server/internal/acls"; +import { logger } from "~/server/internal/logging"; // TODO add web socket sessions for horizontal scaling // Peer ID to user ID @@ -29,7 +30,7 @@ export default defineWebSocketHandler({ async close(peer, _details) { const userId = socketSessions.get(peer.id); if (!userId) { - console.log(`skipping websocket close for ${peer.id}`); + logger.info(`skipping websocket close for ${peer.id}`); return; } diff --git a/server/internal/acls/index.ts b/server/internal/acls/index.ts index 2668bc8..cd4bb41 100644 --- a/server/internal/acls/index.ts +++ 
b/server/internal/acls/index.ts @@ -134,9 +134,6 @@ class ACLManager { if (tokenACLIndex != -1) return token.userId; } - console.log(token); - console.log(acls); - return undefined; } diff --git a/server/internal/auth/index.ts b/server/internal/auth/index.ts index 8942bf1..fda5c06 100644 --- a/server/internal/auth/index.ts +++ b/server/internal/auth/index.ts @@ -1,5 +1,6 @@ import { AuthMec } from "~/prisma/client"; import { OIDCManager } from "./oidc"; +import { logger } from "~/server/internal/logging"; class AuthManager { private authProviders: { @@ -21,7 +22,7 @@ class AuthManager { }; constructor() { - console.log("AuthManager initialized"); + logger.info("AuthManager initialized"); } async init() { @@ -31,9 +32,9 @@ class AuthManager { if (!object) break; // eslint-disable-next-line @typescript-eslint/no-explicit-any (this.authProviders as any)[key] = object; - console.log(`enabled auth: ${key}`); + logger.info(`enabled auth: ${key}`); } catch (e) { - console.warn(e); + logger.warn(e); } } diff --git a/server/internal/auth/oidc/index.ts b/server/internal/auth/oidc/index.ts index 4048386..fee8900 100644 --- a/server/internal/auth/oidc/index.ts +++ b/server/internal/auth/oidc/index.ts @@ -6,6 +6,7 @@ import objectHandler from "../../objects"; import type { Readable } from "stream"; import * as jdenticon from "jdenticon"; import { systemConfig } from "../../config/sys-conf"; +import { logger } from "~/server/internal/logging"; interface OIDCWellKnown { authorization_endpoint: string; @@ -206,7 +207,7 @@ export class OIDCManager { return { user, options: session.options }; } catch (e) { - console.error(e); + logger.error(e); return `Request to identity provider failed: ${e}`; } } diff --git a/server/internal/library/index.ts b/server/internal/library/index.ts index 4e9cd5f..d9e563a 100644 --- a/server/internal/library/index.ts +++ b/server/internal/library/index.ts @@ -223,7 +223,7 @@ class LibraryManager { taskGroup: "import:game", name: `Importing version 
${versionName} for ${game.mName}`, acls: ["system:import:version:read"], - async run({ progress, log }) { + async run({ progress, logger }) { // First, create the manifest via droplet. // This takes up 90% of our progress, so we wrap it in a *0.9 const manifest = await library.generateDropletManifest( @@ -235,11 +235,11 @@ class LibraryManager { }, (err, value) => { if (err) throw err; - log(value); + logger.info(value); }, ); - log("Created manifest successfully!"); + logger.info("Created manifest successfully!"); const currentIndex = await prisma.gameVersion.count({ where: { gameId: gameId }, @@ -282,7 +282,7 @@ class LibraryManager { }); } - log("Successfully created version!"); + logger.info("Successfully created version!"); notificationSystem.systemPush({ nonce: `version-create-${gameId}-${versionName}`, diff --git a/server/internal/logging/index.ts b/server/internal/logging/index.ts new file mode 100644 index 0000000..deee48c --- /dev/null +++ b/server/internal/logging/index.ts @@ -0,0 +1,12 @@ +import pino from "pino"; + +export const logger = pino({ + transport: { + target: "pino-pretty", + options: { + colorize: true, + }, + }, +}); + +logger.child({}); diff --git a/server/internal/metadata/giantbomb.ts b/server/internal/metadata/giantbomb.ts index 176d808..53d71dc 100644 --- a/server/internal/metadata/giantbomb.ts +++ b/server/internal/metadata/giantbomb.ts @@ -169,7 +169,7 @@ export class GiantBombProvider implements MetadataProvider { { id, publisher, developer, createObject }: _FetchGameMetadataParams, context?: TaskRunContext, ): Promise { - context?.log("Using GiantBomb provider"); + context?.logger.info("Using GiantBomb provider"); const result = await this.request("game", id, {}); const gameData = result.data.results; @@ -181,10 +181,14 @@ export class GiantBombProvider implements MetadataProvider { const publishers: Company[] = []; if (gameData.publishers) { for (const pub of gameData.publishers) { - context?.log(`Importing publisher 
"${pub.name}"`); + context?.logger.info(`Importing publisher "${pub.name}"`); const res = await publisher(pub.name); - if (res === undefined) continue; + if (res === undefined) { + context?.logger.warn(`Failed to import publisher "${pub}"`); + continue; + } + context?.logger.info(`Imported publisher "${pub}"`); publishers.push(res); } } @@ -194,10 +198,14 @@ export class GiantBombProvider implements MetadataProvider { const developers: Company[] = []; if (gameData.developers) { for (const dev of gameData.developers) { - context?.log(`Importing developer "${dev.name}"`); + context?.logger.info(`Importing developer "${dev.name}"`); const res = await developer(dev.name); - if (res === undefined) continue; + if (res === undefined) { + context?.logger.warn(`Failed to import developer "${dev}"`); + continue; + } + context?.logger.info(`Imported developer "${dev}"`); developers.push(res); } } @@ -211,7 +219,7 @@ export class GiantBombProvider implements MetadataProvider { const images = [banner, ...imageURLs.map(createObject)]; - context?.log(`Found all images. Total of ${images.length + 1}.`); + context?.logger.info(`Found all images. Total of ${images.length + 1}.`); const releaseDate = gameData.original_release_date ? 
DateTime.fromISO(gameData.original_release_date).toJSDate() @@ -225,7 +233,7 @@ export class GiantBombProvider implements MetadataProvider { const reviews: GameMetadataRating[] = []; if (gameData.reviews) { - context?.log("Found reviews, importing..."); + context?.logger.info("Found reviews, importing..."); for (const { api_detail_url } of gameData.reviews) { const reviewId = api_detail_url.split("/").at(-2); if (!reviewId) continue; @@ -260,7 +268,7 @@ export class GiantBombProvider implements MetadataProvider { images, }; - context?.log("GiantBomb provider finished."); + context?.logger.info("GiantBomb provider finished."); context?.progress(100); return metadata; @@ -268,7 +276,7 @@ export class GiantBombProvider implements MetadataProvider { async fetchCompany({ query, createObject, - }: _FetchCompanyMetadataParams): Promise { + }: _FetchCompanyMetadataParams): Promise { const results = await this.request>( "search", "", @@ -279,7 +287,7 @@ export class GiantBombProvider implements MetadataProvider { const company = results.data.results.find((e) => e.name == query) ?? results.data.results.at(0); - if (!company) throw new Error(`No results for "${query}"`); + if (!company) return undefined; const longDescription = company.description ? 
this.turndown.turndown(company.description) diff --git a/server/internal/metadata/igdb.ts b/server/internal/metadata/igdb.ts index 877e061..7631f2a 100644 --- a/server/internal/metadata/igdb.ts +++ b/server/internal/metadata/igdb.ts @@ -14,6 +14,7 @@ import axios from "axios"; import { DateTime } from "luxon"; import * as jdenticon from "jdenticon"; import type { TaskRunContext } from "../tasks"; +import { logger } from "~/server/internal/logging"; type IGDBID = number; @@ -163,7 +164,7 @@ export class IGDBProvider implements MetadataProvider { } private async authWithTwitch() { - console.log("IGDB authorizing with twitch"); + logger.info("IGDB authorizing with twitch"); const params = new URLSearchParams({ client_id: this.clientId, client_secret: this.clientSecret, @@ -186,7 +187,7 @@ export class IGDBProvider implements MetadataProvider { seconds: response.data.expires_in, }); - console.log("IDGB done authorizing with twitch"); + logger.info("IDGB done authorizing with twitch"); } private async refreshCredentials() { @@ -354,16 +355,16 @@ export class IGDBProvider implements MetadataProvider { const currentGame = (await this.request("games", body)).at(0); if (!currentGame) throw new Error("No game found on IGDB with that id"); - context?.log("Using IDGB provider."); + context?.logger.info("Using IDGB provider."); let iconRaw; const cover = currentGame.cover; if (cover !== undefined) { - context?.log("Found cover URL, using..."); + context?.logger.info("Found cover URL, using..."); iconRaw = await this.getCoverURL(cover); } else { - context?.log("Missing cover URL, using fallback..."); + context?.logger.info("Missing cover URL, using fallback..."); iconRaw = jdenticon.toPng(id, 512); } @@ -400,7 +401,7 @@ export class IGDBProvider implements MetadataProvider { >("companies", `where id = ${foundInvolved.company}; fields name;`); for (const company of findCompanyResponse) { - context?.log( + context?.logger.info( `Found involved company "${company.name}" as: 
${foundInvolved.developer ? "developer, " : ""}${foundInvolved.publisher ? "publisher" : ""}`, ); @@ -408,13 +409,25 @@ export class IGDBProvider implements MetadataProvider { // CANNOT use else since a company can be both if (foundInvolved.developer) { const res = await developer(company.name); - if (res === undefined) continue; + if (res === undefined) { + context?.logger.warn( + `Failed to import developer "${company.name}"`, + ); + continue; + } + context?.logger.info(`Imported developer "${company.name}"`); developers.push(res); } if (foundInvolved.publisher) { const res = await publisher(company.name); - if (res === undefined) continue; + if (res === undefined) { + context?.logger.warn( + `Failed to import publisher "${company.name}"`, + ); + continue; + } + context?.logger.info(`Imported publisher "${company.name}"`); publishers.push(res); } } @@ -461,7 +474,7 @@ export class IGDBProvider implements MetadataProvider { images, }; - context?.log("IGDB provider finished."); + context?.logger.info("IGDB provider finished."); context?.progress(100); return metadata; @@ -469,7 +482,7 @@ export class IGDBProvider implements MetadataProvider { async fetchCompany({ query, createObject, - }: _FetchCompanyMetadataParams): Promise { + }: _FetchCompanyMetadataParams): Promise { const response = await this.request( "companies", `where name = "${query}"; fields *; limit 1;`, @@ -503,6 +516,6 @@ export class IGDBProvider implements MetadataProvider { return metadata; } - throw new Error(`igdb failed to find publisher/developer ${query}`); + return undefined; } } diff --git a/server/internal/metadata/index.ts b/server/internal/metadata/index.ts index 8292bd1..1bb3b1d 100644 --- a/server/internal/metadata/index.ts +++ b/server/internal/metadata/index.ts @@ -16,6 +16,7 @@ import type { TaskRunContext } from "../tasks"; import taskHandler, { wrapTaskContext } from "../tasks"; import { randomUUID } from "crypto"; import { fuzzy } from "fast-fuzzy"; +import { logger } from 
"~/server/internal/logging"; export class MissingMetadataProviderConfig extends Error { private providerName: string; @@ -89,7 +90,7 @@ export class MetadataHandler { ); resolve(mappedResults); } catch (e) { - console.warn(e); + logger.warn(e); reject(e); } }); @@ -187,7 +188,7 @@ export class MetadataHandler { taskGroup: "import:game", acls: ["system:import:game:read"], async run(context) { - const { progress, log } = context; + const { progress, logger } = context; progress(0); @@ -262,12 +263,12 @@ export class MetadataHandler { }); progress(63); - log(`Successfully fetched all metadata.`); - log(`Importing objects...`); + logger.info(`Successfully fetched all metadata.`); + logger.info(`Importing objects...`); await pullObjects(); - log(`Finished game import.`); + logger.info(`Finished game import.`); }, }); @@ -301,7 +302,7 @@ export class MetadataHandler { ); } } catch (e) { - console.warn(e); + logger.warn(e); dumpObjects(); continue; } @@ -337,9 +338,6 @@ export class MetadataHandler { return object; } - // throw new Error( - // `No metadata provider found a ${databaseName} for "${query}"`, - // ); return undefined; } } diff --git a/server/internal/metadata/manual.ts b/server/internal/metadata/manual.ts index 551faf3..be66798 100644 --- a/server/internal/metadata/manual.ts +++ b/server/internal/metadata/manual.ts @@ -44,7 +44,7 @@ export class ManualMetadataProvider implements MetadataProvider { } async fetchCompany( _params: _FetchCompanyMetadataParams, - ): Promise { - throw new Error("Method not implemented."); + ): Promise { + return undefined; } } diff --git a/server/internal/metadata/pcgamingwiki.ts b/server/internal/metadata/pcgamingwiki.ts index 8bdf593..fb9c0f6 100644 --- a/server/internal/metadata/pcgamingwiki.ts +++ b/server/internal/metadata/pcgamingwiki.ts @@ -16,6 +16,7 @@ import { DateTime } from "luxon"; import * as cheerio from "cheerio"; import { type } from "arktype"; import type { TaskRunContext } from "../tasks"; +import { logger } from 
"~/server/internal/logging"; interface PCGamingWikiParseRawPage { parse: { @@ -184,7 +185,7 @@ export class PCGamingWikiProvider implements MetadataProvider { let matches; if ((matches = opencriticRegex.exec(url.pathname)) !== null) { matches.forEach((match, _groupIndex) => { - // console.log(`Found match, group ${_groupIndex}: ${match}`); + // logger.log(`Found match, group ${_groupIndex}: ${match}`); id = match; }); } @@ -199,7 +200,7 @@ export class PCGamingWikiProvider implements MetadataProvider { return url.pathname.replace("/games/", "").replace(/\/$/, ""); } default: { - console.warn("Pcgamingwiki, unknown host", url.hostname); + logger.warn("Pcgamingwiki, unknown host", url.hostname); return undefined; } } @@ -223,7 +224,7 @@ export class PCGamingWikiProvider implements MetadataProvider { const href = reviewEle.attr("href"); if (!href) { - console.log( + logger.info( `pcgamingwiki: failed to properly get review href for ${source}`, ); return undefined; @@ -232,7 +233,7 @@ export class PCGamingWikiProvider implements MetadataProvider { rating: reviewEle.text().trim(), }); if (ratingObj instanceof type.errors) { - console.log( + logger.info( "pcgamingwiki: failed to properly get review rating", ratingObj.summary, ); @@ -374,7 +375,7 @@ export class PCGamingWikiProvider implements MetadataProvider { { id, name, publisher, developer, createObject }: _FetchGameMetadataParams, context?: TaskRunContext, ): Promise { - context?.log("Using PCGamingWiki provider"); + context?.logger.info("Using PCGamingWiki provider"); context?.progress(0); const searchParams = new URLSearchParams({ @@ -397,13 +398,18 @@ export class PCGamingWikiProvider implements MetadataProvider { const publishers: Company[] = []; if (game.Publishers !== null) { - context?.log("Found publishers, importing..."); + context?.logger.info("Found publishers, importing..."); const pubListClean = this.parseWikiStringArray(game.Publishers); for (const pub of pubListClean) { - context?.log(`Importing 
"${pub}"...`); + context?.logger.info(`Importing publisher "${pub}"...`); const res = await publisher(pub); - if (res === undefined) continue; + if (res === undefined) { + context?.logger.warn(`Failed to import publisher "${pub}"`); + continue; + } + context?.logger.info(`Imported publisher "${pub}"`); + // add to publishers publishers.push(res); } } @@ -412,12 +418,16 @@ export class PCGamingWikiProvider implements MetadataProvider { const developers: Company[] = []; if (game.Developers !== null) { - context?.log("Found developers, importing..."); + context?.logger.info("Found developers, importing..."); const devListClean = this.parseWikiStringArray(game.Developers); for (const dev of devListClean) { - context?.log(`Importing "${dev}"...`); + context?.logger.info(`Importing developer "${dev}"...`); const res = await developer(dev); - if (res === undefined) continue; + if (res === undefined) { + context?.logger.warn(`Failed to import developer "${dev}"`); + continue; + } + context?.logger.info(`Imported developer "${dev}"`); developers.push(res); } } @@ -453,7 +463,7 @@ export class PCGamingWikiProvider implements MetadataProvider { images: [icon], }; - context?.log("PCGamingWiki provider finished."); + context?.logger.info("PCGamingWiki provider finished."); context?.progress(100); return metadata; @@ -462,7 +472,7 @@ export class PCGamingWikiProvider implements MetadataProvider { async fetchCompany({ query, createObject, - }: _FetchCompanyMetadataParams): Promise { + }: _FetchCompanyMetadataParams): Promise { const searchParams = new URLSearchParams({ action: "cargoquery", tables: "Company", @@ -496,6 +506,6 @@ export class PCGamingWikiProvider implements MetadataProvider { return metadata; } - throw new Error(`pcgamingwiki failed to find publisher/developer ${query}`); + return undefined; } } diff --git a/server/internal/objects/fsBackend.ts b/server/internal/objects/fsBackend.ts index e8b070a..aedab6a 100644 --- a/server/internal/objects/fsBackend.ts +++ 
b/server/internal/objects/fsBackend.ts @@ -9,6 +9,8 @@ import prisma from "../db/database"; import cacheHandler from "../cache"; import { systemConfig } from "../config/sys-conf"; import { type } from "arktype"; +import { logger } from "~/server/internal/logging"; +import type pino from "pino"; export class FsObjectBackend extends ObjectBackend { private baseObjectPath: string; @@ -121,7 +123,7 @@ export class FsObjectBackend extends ObjectBackend { const metadataRaw = JSON.parse(fs.readFileSync(metadataPath, "utf-8")); const metadata = objectMetadata(metadataRaw); if (metadata instanceof type.errors) { - console.error("FsObjectBackend#fetchMetadata", metadata.summary); + logger.error("FsObjectBackend#fetchMetadata", metadata.summary); return undefined; } await this.metadataCache.set(id, metadata); @@ -175,23 +177,27 @@ export class FsObjectBackend extends ObjectBackend { return fs.readdirSync(this.baseObjectPath); } - async cleanupMetadata() { + async cleanupMetadata(taskLogger: pino.Logger) { + const cleanupLogger = taskLogger ?? 
logger; + const metadataFiles = fs.readdirSync(this.baseMetadataPath); const objects = await this.listAll(); const extraFiles = metadataFiles.filter( (file) => !objects.includes(file.replace(/\.json$/, "")), ); - console.log( + cleanupLogger.info( `[FsObjectBackend#cleanupMetadata]: Found ${extraFiles.length} metadata files without corresponding objects.`, ); for (const file of extraFiles) { const filePath = path.join(this.baseMetadataPath, file); try { fs.rmSync(filePath); - console.log(`[FsObjectBackend#cleanupMetadata]: Removed ${file}`); + cleanupLogger.info( + `[FsObjectBackend#cleanupMetadata]: Removed ${file}`, + ); } catch (error) { - console.error( + cleanupLogger.error( `[FsObjectBackend#cleanupMetadata]: Failed to remove ${file}`, error, ); diff --git a/server/internal/objects/objectHandler.ts b/server/internal/objects/objectHandler.ts index 0506f41..eb5379d 100644 --- a/server/internal/objects/objectHandler.ts +++ b/server/internal/objects/objectHandler.ts @@ -16,6 +16,7 @@ import { type } from "arktype"; import { parse as getMimeTypeBuffer } from "file-type-mime"; +import type pino from "pino"; import type { Writable } from "stream"; import { Readable } from "stream"; import { getMimeType as getMimeTypeStream } from "stream-mime-type"; @@ -71,7 +72,7 @@ export abstract class ObjectBackend { ): Promise; abstract fetchHash(id: ObjectReference): Promise; abstract listAll(): Promise; - abstract cleanupMetadata(): Promise; + abstract cleanupMetadata(taskLogger: pino.Logger): Promise; } export class ObjectHandler { @@ -264,7 +265,7 @@ export class ObjectHandler { * This is useful for cleaning up metadata files that are left behinds * @returns */ - async cleanupMetadata() { - return await this.backend.cleanupMetadata(); + async cleanupMetadata(taskLogger: pino.Logger) { + return await this.backend.cleanupMetadata(taskLogger); } } diff --git a/server/internal/objects/transactional.ts b/server/internal/objects/transactional.ts index 080253d..bac9475 100644 --- 
a/server/internal/objects/transactional.ts +++ b/server/internal/objects/transactional.ts @@ -43,9 +43,9 @@ export class ObjectTransactionalHandler { for (const [id, data] of transaction) { if (typeof data === "string") { - context?.log(`Importing object from "${data}"`); + context?.logger.info(`Importing object from "${data}"`); } else { - context?.log(`Importing raw object...`); + context?.logger.info(`Importing raw object...`); } await objectHandler.createFromSource( id, diff --git a/server/internal/tasks/index.ts b/server/internal/tasks/index.ts index 83a3446..0a64499 100644 --- a/server/internal/tasks/index.ts +++ b/server/internal/tasks/index.ts @@ -10,6 +10,9 @@ import cleanupObjects from "./registry/objects"; import { taskGroups, type TaskGroup } from "./group"; import prisma from "../db/database"; import { type } from "arktype"; +import pino from "pino"; +import { logger } from "~/server/internal/logging"; +import { Writable } from "node:stream"; // a task that has been run type FinishedTask = { @@ -80,7 +83,7 @@ class TaskHandler { // if a task is already running, we don't want to start another if (existingTask.taskGroup === task.taskGroup) { // TODO: handle this more gracefully, maybe with a queue? should be configurable - console.warn( + logger.warn( `Task group ${task.taskGroup} does not allow concurrent tasks. 
Task ${task.id} will not be started.`, ); throw new Error( @@ -126,16 +129,82 @@ class TaskHandler { }, 100); }); - const log = (entry: string) => { - const taskEntry = this.taskPool.get(task.id); - if (!taskEntry) return; - taskEntry.log.push(msgWithTimestamp(entry)); - updateAllClients(); - }; + const taskPool = this.taskPool; + + // Create a pino transport that replicates the old log function behavior + // const taskLogger = pino({ + // hooks: { + // logMethod(args, method) { + // // Combine all arguments into a single string message + // const message = args.map(String).join(" "); + // const now = new Date(); + + // const pad = (n: number, width = 2) => + // n.toString().padStart(width, "0"); + + // const year = now.getUTCFullYear(); + // const month = pad(now.getUTCMonth() + 1); + // const day = pad(now.getUTCDate()); + + // const hours = pad(now.getUTCHours()); + // const minutes = pad(now.getUTCMinutes()); + // const seconds = pad(now.getUTCSeconds()); + // const milliseconds = pad(now.getUTCMilliseconds(), 3); + + // const logObj = { + // timestamp: `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${milliseconds} UTC`, + // message, + // }; + + // // Push the formatted log string to the task's log array + // const taskEntry = taskPool.get(task.id); + // if (taskEntry) { + // taskEntry.log.push(JSON.stringify(logObj)); + // updateAllClients(); + // } + + // // Optionally, still call the original method if you want logs elsewhere + // method.apply(this, args); + // }, + // }, + // }); + + // Custom writable stream to capture logs + const logStream = new Writable({ + objectMode: true, + write(chunk, encoding, callback) { + try { + // chunk is a stringified JSON log line + const logObj = JSON.parse(chunk.toString()); + const taskEntry = taskPool.get(task.id); + if (taskEntry) { + taskEntry.log.push(JSON.stringify(logObj)); + updateAllClients(); + } + } catch (e) { + // fallback: ignore or log error + logger.error("Failed to parse log chunk", { + error: 
e, + chunk: chunk, + }); + } + callback(); + }, + }); + + // Use pino with the custom stream + const taskLogger = pino( + { + // You can configure timestamp, level, etc. here + timestamp: pino.stdTimeFunctions.isoTime, + base: null, // Remove pid/hostname if not needed + }, + logStream, + ); const progress = (progress: number) => { if (progress < 0 || progress > 100) { - console.error("Progress must be between 0 and 100", { progress }); + logger.error("Progress must be between 0 and 100", { progress }); return; } const taskEntry = this.taskPool.get(task.id); @@ -165,7 +234,7 @@ class TaskHandler { if (!taskEntry) throw new Error("No task entry"); try { - await task.run({ progress, log }); + await task.run({ progress, logger: taskLogger }); taskEntry.success = true; } catch (error: unknown) { taskEntry.success = false; @@ -226,7 +295,7 @@ class TaskHandler { const allowed = await aclManager.hasACL(request, task.acls); if (!allowed) { - console.warn("user does not have necessary ACLs"); + // logger.warn("user does not have necessary ACLs"); peer.send( `error/${taskId}/Unknown task/Drop couldn't find the task you're looking for.`, ); @@ -304,7 +373,7 @@ class TaskHandler { runTaskGroupByName(name: TaskGroup) { const task = this.taskCreators.get(name); if (!task) { - console.warn(`No task found for group ${name}`); + logger.warn(`No task found for group ${name}`); return; } this.create(task()); @@ -365,17 +434,21 @@ class TaskHandler { export type TaskRunContext = { progress: (progress: number) => void; - log: (message: string) => void; + logger: typeof logger; }; export function wrapTaskContext( context: TaskRunContext, options: { min: number; max: number; prefix: string }, ): TaskRunContext { + const child = context.logger.child({ + prefix: options.prefix, + }); + return { progress(progress) { if (progress > 100 || progress < 0) { - console.warn("[wrapTaskContext] progress must be between 0 and 100"); + logger.warn("[wrapTaskContext] progress must be between 0 and 
100"); } // I was too tired to figure this out @@ -385,9 +458,7 @@ export function wrapTaskContext( const adjustedProgress = (progress * newRange) / oldRange + options.min; return context.progress(adjustedProgress); }, - log(message) { - return context.log(options.prefix + message); - }, + logger: child, }; } @@ -431,31 +502,31 @@ export const TaskLog = type({ message: "string", }); -/** - * Create a log message with a timestamp in the format YYYY-MM-DD HH:mm:ss.SSS UTC - * @param message - * @returns - */ -function msgWithTimestamp(message: string): string { - const now = new Date(); +// /** +// * Create a log message with a timestamp in the format YYYY-MM-DD HH:mm:ss.SSS UTC +// * @param message +// * @returns +// */ +// function msgWithTimestamp(message: string): string { +// const now = new Date(); - const pad = (n: number, width = 2) => n.toString().padStart(width, "0"); +// const pad = (n: number, width = 2) => n.toString().padStart(width, "0"); - const year = now.getUTCFullYear(); - const month = pad(now.getUTCMonth() + 1); - const day = pad(now.getUTCDate()); +// const year = now.getUTCFullYear(); +// const month = pad(now.getUTCMonth() + 1); +// const day = pad(now.getUTCDate()); - const hours = pad(now.getUTCHours()); - const minutes = pad(now.getUTCMinutes()); - const seconds = pad(now.getUTCSeconds()); - const milliseconds = pad(now.getUTCMilliseconds(), 3); +// const hours = pad(now.getUTCHours()); +// const minutes = pad(now.getUTCMinutes()); +// const seconds = pad(now.getUTCSeconds()); +// const milliseconds = pad(now.getUTCMilliseconds(), 3); - const log: typeof TaskLog.infer = { - timestamp: `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${milliseconds} UTC`, - message, - }; - return JSON.stringify(log); -} +// const log: typeof TaskLog.infer = { +// timestamp: `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${milliseconds} UTC`, +// message, +// }; +// return JSON.stringify(log); +// } export function defineDropTask(buildTask: 
BuildTask): DropTask { // TODO: only let one task with the same taskGroup run at the same time if specified diff --git a/server/internal/tasks/registry/invitations.ts b/server/internal/tasks/registry/invitations.ts index 7cac166..a247f7b 100644 --- a/server/internal/tasks/registry/invitations.ts +++ b/server/internal/tasks/registry/invitations.ts @@ -6,8 +6,8 @@ export default defineDropTask({ name: "Cleanup Invitations", acls: ["system:maintenance:read"], taskGroup: "cleanup:invitations", - async run({ log }) { - log("Cleaning invitations"); + async run({ progress, logger }) { + logger.info("Cleaning invitations"); const now = new Date(); @@ -19,6 +19,7 @@ export default defineDropTask({ }, }); - log("Done"); + logger.info("Done"); + progress(100); }, }); diff --git a/server/internal/tasks/registry/objects.ts b/server/internal/tasks/registry/objects.ts index a59e409..63a5e71 100644 --- a/server/internal/tasks/registry/objects.ts +++ b/server/internal/tasks/registry/objects.ts @@ -15,36 +15,38 @@ export default defineDropTask({ name: "Cleanup Objects", acls: ["system:maintenance:read"], taskGroup: "cleanup:objects", - async run({ progress, log }) { - log("Cleaning unreferenced objects"); + async run({ progress, logger }) { + logger.info("Cleaning unreferenced objects"); // get all objects const objects = await objectHandler.listAll(); - log(`searching for ${objects.length} objects`); + logger.info(`searching for ${objects.length} objects`); progress(30); // find unreferenced objects const refMap = buildRefMap(); - log("Building reference map"); - log(`Found ${Object.keys(refMap).length} models with reference fields`); - log("Searching for unreferenced objects"); + logger.info("Building reference map"); + logger.info( + `Found ${Object.keys(refMap).length} models with reference fields`, + ); + logger.info("Searching for unreferenced objects"); const unrefedObjects = await findUnreferencedStrings(objects, refMap); - log(`found ${unrefedObjects.length} Unreferenced 
objects`); - // console.log(unrefedObjects); + logger.info(`found ${unrefedObjects.length} Unreferenced objects`); + // logger.info(unrefedObjects); progress(60); // remove objects const deletePromises: Promise[] = []; for (const obj of unrefedObjects) { - log(`Deleting object ${obj}`); + logger.info(`Deleting object ${obj}`); deletePromises.push(objectHandler.deleteAsSystem(obj)); } await Promise.all(deletePromises); // Remove any possible leftover metadata - objectHandler.cleanupMetadata(); + await objectHandler.cleanupMetadata(logger); - log("Done"); + logger.info("Done"); progress(100); }, }); diff --git a/server/internal/tasks/registry/sessions.ts b/server/internal/tasks/registry/sessions.ts index 99372c2..6839829 100644 --- a/server/internal/tasks/registry/sessions.ts +++ b/server/internal/tasks/registry/sessions.ts @@ -6,9 +6,10 @@ export default defineDropTask({ name: "Cleanup Sessions", acls: ["system:maintenance:read"], taskGroup: "cleanup:sessions", - async run({ log }) { - log("Cleaning up sessions"); + async run({ progress, logger }) { + logger.info("Cleaning up sessions"); await sessionHandler.cleanupSessions(); - log("Done"); + logger.info("Done"); + progress(100); }, }); diff --git a/server/internal/tasks/registry/update.ts b/server/internal/tasks/registry/update.ts index 39d4134..452fd19 100644 --- a/server/internal/tasks/registry/update.ts +++ b/server/internal/tasks/registry/update.ts @@ -21,38 +21,38 @@ export default defineDropTask({ name: "Check for Update", acls: ["system:maintenance:read"], taskGroup: "check:update", - async run({ log }) { + async run({ progress, logger }) { // TODO: maybe implement some sort of rate limit thing to prevent this from calling github api a bunch in the event of crashloop or whatever? 
// probably will require custom task scheduler for object cleanup anyway, so something to think about if (!systemConfig.shouldCheckForUpdates()) { - log("Update check is disabled by configuration"); + logger.info("Update check is disabled by configuration"); + progress(100); return; } - log("Checking for update"); + logger.info("Checking for update"); const currVerStr = systemConfig.getDropVersion(); const currVer = semver.coerce(currVerStr); if (currVer === null) { const msg = "Drop provided a invalid semver tag"; - log(msg); + logger.info(msg); throw new Error(msg); } + progress(30); const response = await fetch( "https://api.github.com/repos/Drop-OSS/drop/releases/latest", ); + progress(50); // if response failed somehow if (!response.ok) { - log( - "Failed to check for update " + - JSON.stringify({ - status: response.status, - body: response.body, - }), - ); + logger.info("Failed to check for update ", { + status: response.status, + body: response.body, + }); throw new Error( `Failed to check for update: ${response.status} ${response.body}`, @@ -63,8 +63,8 @@ export default defineDropTask({ const resJson = await response.json(); const body = latestRelease(resJson); if (body instanceof type.errors) { - log(body.summary); - log("GitHub Api response" + JSON.stringify(resJson)); + logger.info(body.summary); + logger.info("GitHub Api response" + JSON.stringify(resJson)); throw new Error( `GitHub Api response did not match expected schema: ${body.summary}`, ); @@ -74,14 +74,15 @@ export default defineDropTask({ const latestVer = semver.coerce(body.tag_name); if (latestVer === null) { const msg = "Github Api returned invalid semver tag"; - log(msg); + logger.info(msg); throw new Error(msg); } + progress(70); // TODO: handle prerelease identifiers https://github.com/npm/node-semver#prerelease-identifiers // check if is newer version if (semver.gt(latestVer, currVer)) { - log("Update available"); + logger.info("Update available"); notificationSystem.systemPush({ nonce:
`drop-update-available-${currVer}-to-${latestVer}`, title: `Update available to v${latestVer}`, @@ -90,9 +91,10 @@ export default defineDropTask({ acls: ["system:notifications:read"], }); } else { - log("no update available"); + logger.info("no update available"); } - log("Done"); + logger.info("Done"); + progress(100); }, }); diff --git a/server/plugins/03.metadata-init.ts b/server/plugins/03.metadata-init.ts index aa47bf3..c9f2616 100644 --- a/server/plugins/03.metadata-init.ts +++ b/server/plugins/03.metadata-init.ts @@ -5,6 +5,7 @@ import { GiantBombProvider } from "../internal/metadata/giantbomb"; import { IGDBProvider } from "../internal/metadata/igdb"; import { ManualMetadataProvider } from "../internal/metadata/manual"; import { PCGamingWikiProvider } from "../internal/metadata/pcgamingwiki"; +import { logger } from "~/server/internal/logging"; export default defineNitroPlugin(async (_nitro) => { const metadataProviders = [ @@ -21,9 +22,9 @@ export default defineNitroPlugin(async (_nitro) => { const id = prov.source(); providers.set(id, prov); - console.log(`enabled metadata provider: ${prov.name()}`); + logger.info(`enabled metadata provider: ${prov.name()}`); } catch (e) { - console.warn(`skipping metadata provider setup: ${e}`); + logger.warn(`skipping metadata provider setup: ${e}`); } } @@ -35,7 +36,7 @@ export default defineNitroPlugin(async (_nitro) => { const priority = max * 2 - index; // Offset by the length --- (max - index) + max const provider = providers.get(providerId); if (!provider) { - console.warn(`failed to add existing metadata provider: ${providerId}`); + logger.warn(`failed to add existing metadata provider: ${providerId}`); continue; } metadataHandler.addProvider(provider, priority); diff --git a/server/plugins/05.library-init.ts b/server/plugins/05.library-init.ts index e0bdcd9..a4830ac 100644 --- a/server/plugins/05.library-init.ts +++ b/server/plugins/05.library-init.ts @@ -7,6 +7,7 @@ import { FilesystemProvider } from 
"../internal/library/providers/filesystem"; import libraryManager from "../internal/library"; import path from "path"; import { FlatFilesystemProvider } from "../internal/library/providers/flat"; +import { logger } from "~/server/internal/logging"; export const libraryConstructors: { [key in LibraryBackend]: ( @@ -67,14 +68,14 @@ export default defineNitroPlugin(async () => { libraryManager.addLibrary(provider); successes++; } catch (e) { - console.warn( + logger.warn( `Failed to create library (${library.id}) of type ${library.backend}:\n ${e}`, ); } } if (successes == 0) { - console.warn( + logger.warn( "No library was successfully initialised. Please check for errors. If you have just set up an instance, this is normal.", ); } diff --git a/utils/parseTaskLog.ts b/utils/parseTaskLog.ts index 46360eb..92482df 100644 --- a/utils/parseTaskLog.ts +++ b/utils/parseTaskLog.ts @@ -4,7 +4,7 @@ export function parseTaskLog(logStr: string): typeof TaskLog.infer { const log = JSON.parse(logStr); return { - message: log.message, - timestamp: log.timestamp, + message: log.msg, + timestamp: log.time, }; } diff --git a/yarn.lock b/yarn.lock index 354f156..fea4e70 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3035,6 +3035,11 @@ asynckit@^0.4.0: resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== +atomic-sleep@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b" + integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ== + autoprefixer@^10.4.20, autoprefixer@^10.4.21: version "10.4.21" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.21.tgz#77189468e7a8ad1d9a37fbc08efc9f480cf0a95d" @@ -3491,6 +3496,11 @@ colord@^2.9.3: resolved 
"https://registry.yarnpkg.com/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== +colorette@^2.0.7: + version "2.0.20" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" + integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== + colorspace@1.1.x: version "1.1.4" resolved "https://registry.yarnpkg.com/colorspace/-/colorspace-1.1.4.tgz#8d442d1186152f60453bf8070cd66eb364e59243" @@ -3793,6 +3803,11 @@ data-uri-to-buffer@^4.0.0: resolved "https://registry.yarnpkg.com/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz#d8feb2b2881e6a4f58c2e08acfd0e2834e26222e" integrity sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A== +dateformat@^4.6.3: + version "4.6.3" + resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-4.6.3.tgz#556fa6497e5217fedb78821424f8a1c22fa3f4b5" + integrity sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA== + db0@^0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/db0/-/db0-0.3.2.tgz#f2f19a547ac5519714a510edf0f93daf61ff7e47" @@ -4586,6 +4601,11 @@ extract-zip@^2.0.1: optionalDependencies: "@types/yauzl" "^2.9.1" +fast-copy@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/fast-copy/-/fast-copy-3.0.2.tgz#59c68f59ccbcac82050ba992e0d5c389097c9d35" + integrity sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ== + fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" @@ -4629,6 +4649,16 @@ fast-npm-meta@^0.4.3: resolved 
"https://registry.yarnpkg.com/fast-npm-meta/-/fast-npm-meta-0.4.3.tgz#8ab0b9ced8e5a60ffca5bca2d0b6e965c14dc706" integrity sha512-eUzR/uVx61fqlHBjG/eQx5mQs7SQObehMTTdq8FAkdCB4KuZSQ6DiZMIrAq4kcibB3WFLQ9c4dT26Vwkix1RKg== +fast-redact@^3.1.1: + version "3.5.0" + resolved "https://registry.yarnpkg.com/fast-redact/-/fast-redact-3.5.0.tgz#e9ea02f7e57d0cd8438180083e93077e496285e4" + integrity sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A== + +fast-safe-stringify@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884" + integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA== + fastq@^1.15.0, fastq@^1.6.0: version "1.19.1" resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.19.1.tgz#d50eaba803c8846a883c16492821ebcd2cda55f5" @@ -5101,6 +5131,11 @@ he@^1.2.0: resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== +help-me@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/help-me/-/help-me-5.0.0.tgz#b1ebe63b967b74060027c2ac61f9be12d354a6f6" + integrity sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg== + hookable@^5.5.3: version "5.5.3" resolved "https://registry.yarnpkg.com/hookable/-/hookable-5.5.3.tgz#6cfc358984a1ef991e2518cb9ed4a778bbd3215d" @@ -5491,6 +5526,11 @@ jiti@2.4.2, jiti@^2.1.2, jiti@^2.4.2: resolved "https://registry.yarnpkg.com/jiti/-/jiti-2.4.2.tgz#d19b7732ebb6116b06e2038da74a55366faef560" integrity sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A== +joycon@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/joycon/-/joycon-3.1.1.tgz#bce8596d6ae808f8b68168f5fc69280996894f03" + integrity 
sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw== + js-base64@^3.6.0: version "3.7.7" resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-3.7.7.tgz#e51b84bf78fbf5702b9541e2cb7bfcb893b43e79" @@ -6221,7 +6261,7 @@ minimatch@^9.0.0, minimatch@^9.0.3, minimatch@^9.0.4: dependencies: brace-expansion "^2.0.1" -minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.5: +minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.5, minimist@^1.2.6: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== @@ -6674,6 +6714,11 @@ on-change@^5.0.1: resolved "https://registry.yarnpkg.com/on-change/-/on-change-5.0.1.tgz#ced60d262211eee41043e7479515b4875d1744ef" integrity sha512-n7THCP7RkyReRSLkJb8kUWoNsxUIBxTkIp3JKno+sEz6o/9AJ3w3P9fzQkITEkMwyTKJjZciF3v/pVoouxZZMg== +on-exit-leak-free@^2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz#fed195c9ebddb7d9e4c3842f93f281ac8dadd3b8" + integrity sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA== + on-finished@^2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" @@ -7002,6 +7047,54 @@ picomatch@^4.0.2: resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-4.0.2.tgz#77c742931e8f3b8820946c76cd0c1f13730d1dab" integrity sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg== +pino-abstract-transport@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pino-abstract-transport/-/pino-abstract-transport-2.0.0.tgz#de241578406ac7b8a33ce0d77ae6e8a0b3b68a60" + integrity sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw== + dependencies: + split2 
"^4.0.0" + +pino-pretty@^13.0.0: + version "13.0.0" + resolved "https://registry.yarnpkg.com/pino-pretty/-/pino-pretty-13.0.0.tgz#21d57fe940e34f2e279905d7dba2d7e2c4f9bf17" + integrity sha512-cQBBIVG3YajgoUjo1FdKVRX6t9XPxwB9lcNJVD5GCnNM4Y6T12YYx8c6zEejxQsU0wrg9TwmDulcE9LR7qcJqA== + dependencies: + colorette "^2.0.7" + dateformat "^4.6.3" + fast-copy "^3.0.2" + fast-safe-stringify "^2.1.1" + help-me "^5.0.0" + joycon "^3.1.1" + minimist "^1.2.6" + on-exit-leak-free "^2.1.0" + pino-abstract-transport "^2.0.0" + pump "^3.0.0" + secure-json-parse "^2.4.0" + sonic-boom "^4.0.1" + strip-json-comments "^3.1.1" + +pino-std-serializers@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/pino-std-serializers/-/pino-std-serializers-7.0.0.tgz#7c625038b13718dbbd84ab446bd673dc52259e3b" + integrity sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA== + +pino@^9.7.0: + version "9.7.0" + resolved "https://registry.yarnpkg.com/pino/-/pino-9.7.0.tgz#ff7cd86eb3103ee620204dbd5ca6ffda8b53f645" + integrity sha512-vnMCM6xZTb1WDmLvtG2lE/2p+t9hDEIvTWJsu6FejkE62vB7gDhvzrpFR4Cw2to+9JNQxVnkAKVPA1KPB98vWg== + dependencies: + atomic-sleep "^1.0.0" + fast-redact "^3.1.1" + on-exit-leak-free "^2.1.0" + pino-abstract-transport "^2.0.0" + pino-std-serializers "^7.0.0" + process-warning "^5.0.0" + quick-format-unescaped "^4.0.3" + real-require "^0.2.0" + safe-stable-stringify "^2.3.1" + sonic-boom "^4.0.1" + thread-stream "^3.0.0" + pkg-types@^1.0.3, pkg-types@^1.3.0: version "1.3.1" resolved "https://registry.yarnpkg.com/pkg-types/-/pkg-types-1.3.1.tgz#bd7cc70881192777eef5326c19deb46e890917df" @@ -7338,6 +7431,11 @@ process-nextick-args@~2.0.0: resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== +process-warning@^5.0.0: + version "5.0.0" + resolved 
"https://registry.yarnpkg.com/process-warning/-/process-warning-5.0.0.tgz#566e0bf79d1dff30a72d8bbbe9e8ecefe8d378d7" + integrity sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA== + process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" @@ -7391,6 +7489,11 @@ queue-microtask@^1.2.2: resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== +quick-format-unescaped@^4.0.3: + version "4.0.4" + resolved "https://registry.yarnpkg.com/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz#93ef6dd8d3453cbc7970dd614fad4c5954d6b5a7" + integrity sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg== + quote-unquote@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/quote-unquote/-/quote-unquote-1.0.0.tgz#67a9a77148effeaf81a4d428404a710baaac8a0b" @@ -7510,6 +7613,11 @@ readdirp@~3.6.0: dependencies: picomatch "^2.2.1" +real-require@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/real-require/-/real-require-0.2.0.tgz#209632dea1810be2ae063a6ac084fee7e33fba78" + integrity sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg== + redis-errors@^1.0.0, redis-errors@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/redis-errors/-/redis-errors-1.2.0.tgz#eb62d2adb15e4eaf4610c04afe1529384250abad" @@ -7725,6 +7833,11 @@ scule@^1.3.0: resolved "https://registry.yarnpkg.com/scule/-/scule-1.3.0.tgz#6efbd22fd0bb801bdcc585c89266a7d2daa8fbd3" integrity sha512-6FtHJEvt+pVMIB9IBY+IcCJ6Z5f1iQnytgyfKMhDKgmzYG+TeH/wx1y3l27rshSbLiSanrR9ffZDrEsmjlQF2g== +secure-json-parse@^2.4.0: + version "2.7.0" + resolved 
"https://registry.yarnpkg.com/secure-json-parse/-/secure-json-parse-2.7.0.tgz#5a5f9cd6ae47df23dba3151edd06855d47e09862" + integrity sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw== + semver@^6.3.1: version "6.3.1" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" @@ -7911,6 +8024,13 @@ smob@^1.0.0: resolved "https://registry.yarnpkg.com/smob/-/smob-1.5.0.tgz#85d79a1403abf128d24d3ebc1cdc5e1a9548d3ab" integrity sha512-g6T+p7QO8npa+/hNx9ohv1E5pVCmWrVCUzUXJyLdMmftX6ER0oiWY/w9knEonLpnOp6b6FenKnMfR8gqwWdwig== +sonic-boom@^4.0.1: + version "4.2.0" + resolved "https://registry.yarnpkg.com/sonic-boom/-/sonic-boom-4.2.0.tgz#e59a525f831210fa4ef1896428338641ac1c124d" + integrity sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww== + dependencies: + atomic-sleep "^1.0.0" + sortablejs@1.14.0: version "1.14.0" resolved "https://registry.yarnpkg.com/sortablejs/-/sortablejs-1.14.0.tgz#6d2e17ccbdb25f464734df621d4f35d4ab35b3d8" @@ -7978,6 +8098,11 @@ speakingurl@^14.0.1: resolved "https://registry.yarnpkg.com/speakingurl/-/speakingurl-14.0.1.tgz#f37ec8ddc4ab98e9600c1c9ec324a8c48d772a53" integrity sha512-1POYv7uv2gXoyGFpBCmpDVSNV74IfsWlDW216UPjbWufNf+bSU6GdbDsxdcxtfwb4xlI3yxzOTKClUosxARYrQ== +split2@^4.0.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/split2/-/split2-4.2.0.tgz#c9c5920904d148bab0b9f67145f245a86aadbfa4" + integrity sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg== + stable-hash@^0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/stable-hash/-/stable-hash-0.0.5.tgz#94e8837aaeac5b4d0f631d2972adef2924b40269" @@ -8281,6 +8406,13 @@ text-hex@1.0.x: resolved "https://registry.yarnpkg.com/text-hex/-/text-hex-1.0.0.tgz#69dc9c1b17446ee79a92bf5b884bb4b9127506f5" integrity sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg== 
+thread-stream@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/thread-stream/-/thread-stream-3.1.0.tgz#4b2ef252a7c215064507d4ef70c05a5e2d34c4f1" + integrity sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A== + dependencies: + real-require "^0.2.0" + through2@4.0.2: version "4.0.2" resolved "https://registry.yarnpkg.com/through2/-/through2-4.0.2.tgz#a7ce3ac2a7a8b0b966c80e7c49f0484c3b239764"