Mirror of https://github.com/Drop-OSS/drop.git
Logging (#131)
* ci: pull version from package.json on build
* fix: implicit any type
* feat: initial support for logger
* style: fix lint
* feat: move more logging over to pino
* fix: logging around company importing
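The whole change follows one pattern: each module imports the shared pino instance from `server/internal/logging` and swaps `console.log`/`console.warn`/`console.error` for `logger.info`/`logger.warn`/`logger.error` (task code instead receives a per-task pino logger through its run context). A minimal sketch of the pattern, using lines that appear in the diff below:

```ts
import { logger } from "~/server/internal/logging";

// before: console.log("AuthManager initialized");
logger.info("AuthManager initialized");

// before: console.warn(e);
logger.warn(e);
```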
@@ -7,6 +7,7 @@ import authManager, {
   checkHashArgon2,
   checkHashBcrypt,
 } from "~/server/internal/auth";
+import { logger } from "~/server/internal/logging";
 
 const signinValidator = type({
   username: "string",

@@ -28,7 +29,7 @@ export default defineEventHandler<{
   const body = signinValidator(await readBody(h3));
   if (body instanceof type.errors) {
     // hover out.summary to see validation errors
-    console.error(body.summary);
+    logger.error(body.summary);
 
     throw createError({
       statusCode: 400,

@@ -1,5 +1,6 @@
 import notificationSystem from "~/server/internal/notifications";
 import aclManager from "~/server/internal/acls";
+import { logger } from "~/server/internal/logging";
 
 // TODO add web socket sessions for horizontal scaling
 // Peer ID to user ID

@@ -29,7 +30,7 @@ export default defineWebSocketHandler({
   async close(peer, _details) {
     const userId = socketSessions.get(peer.id);
     if (!userId) {
-      console.log(`skipping websocket close for ${peer.id}`);
+      logger.info(`skipping websocket close for ${peer.id}`);
       return;
     }
 

@@ -134,9 +134,6 @@ class ACLManager {
       if (tokenACLIndex != -1) return token.userId;
     }
 
-    console.log(token);
-    console.log(acls);
-
     return undefined;
   }
 

@@ -1,5 +1,6 @@
 import { AuthMec } from "~/prisma/client";
 import { OIDCManager } from "./oidc";
+import { logger } from "~/server/internal/logging";
 
 class AuthManager {
   private authProviders: {

@@ -21,7 +22,7 @@ class AuthManager {
   };
 
   constructor() {
-    console.log("AuthManager initialized");
+    logger.info("AuthManager initialized");
   }
 
   async init() {

@@ -31,9 +32,9 @@ class AuthManager {
         if (!object) break;
         // eslint-disable-next-line @typescript-eslint/no-explicit-any
         (this.authProviders as any)[key] = object;
-        console.log(`enabled auth: ${key}`);
+        logger.info(`enabled auth: ${key}`);
       } catch (e) {
-        console.warn(e);
+        logger.warn(e);
       }
     }
 

@@ -6,6 +6,7 @@ import objectHandler from "../../objects";
 import type { Readable } from "stream";
 import * as jdenticon from "jdenticon";
 import { systemConfig } from "../../config/sys-conf";
+import { logger } from "~/server/internal/logging";
 
 interface OIDCWellKnown {
   authorization_endpoint: string;

@@ -206,7 +207,7 @@ export class OIDCManager {
 
       return { user, options: session.options };
     } catch (e) {
-      console.error(e);
+      logger.error(e);
      return `Request to identity provider failed: ${e}`;
     }
   }

@@ -223,7 +223,7 @@ class LibraryManager {
       taskGroup: "import:game",
       name: `Importing version ${versionName} for ${game.mName}`,
       acls: ["system:import:version:read"],
-      async run({ progress, log }) {
+      async run({ progress, logger }) {
         // First, create the manifest via droplet.
         // This takes up 90% of our progress, so we wrap it in a *0.9
         const manifest = await library.generateDropletManifest(

@@ -235,11 +235,11 @@ class LibraryManager {
           },
           (err, value) => {
             if (err) throw err;
-            log(value);
+            logger.info(value);
           },
         );
 
-        log("Created manifest successfully!");
+        logger.info("Created manifest successfully!");
 
         const currentIndex = await prisma.gameVersion.count({
           where: { gameId: gameId },

@@ -282,7 +282,7 @@ class LibraryManager {
           });
         }
 
-        log("Successfully created version!");
+        logger.info("Successfully created version!");
 
         notificationSystem.systemPush({
           nonce: `version-create-${gameId}-${versionName}`,
server/internal/logging/index.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
+import pino from "pino";
+
+export const logger = pino({
+  transport: {
+    target: "pino-pretty",
+    options: {
+      colorize: true,
+    },
+  },
+});
+
+logger.child({});
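One note on the new module: pino's `child()` returns a new logger with extra bindings attached; the trailing `logger.child({})` discards that return value, so it is effectively a no-op. A hedged sketch of how consumers could use the export, with a child logger carrying bindings (the `module` binding name is illustrative, not part of this commit):

```ts
import { logger } from "~/server/internal/logging";

// Plain usage, as the rest of this commit does:
logger.info("enabled metadata provider: GiantBomb");

// A child logger stamps its bindings onto every line it emits:
const authLogger = logger.child({ module: "auth" });
authLogger.warn("OIDC discovery failed"); // emitted with module: "auth"
```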
@@ -169,7 +169,7 @@ export class GiantBombProvider implements MetadataProvider {
     { id, publisher, developer, createObject }: _FetchGameMetadataParams,
     context?: TaskRunContext,
   ): Promise<GameMetadata> {
-    context?.log("Using GiantBomb provider");
+    context?.logger.info("Using GiantBomb provider");
 
     const result = await this.request<GameResult>("game", id, {});
     const gameData = result.data.results;

@@ -181,10 +181,14 @@ export class GiantBombProvider implements MetadataProvider {
     const publishers: Company[] = [];
     if (gameData.publishers) {
       for (const pub of gameData.publishers) {
-        context?.log(`Importing publisher "${pub.name}"`);
+        context?.logger.info(`Importing publisher "${pub.name}"`);
 
         const res = await publisher(pub.name);
-        if (res === undefined) continue;
+        if (res === undefined) {
+          context?.logger.warn(`Failed to import publisher "${pub}"`);
+          continue;
+        }
+        context?.logger.info(`Imported publisher "${pub}"`);
         publishers.push(res);
       }
     }

@@ -194,10 +198,14 @@ export class GiantBombProvider implements MetadataProvider {
     const developers: Company[] = [];
     if (gameData.developers) {
       for (const dev of gameData.developers) {
-        context?.log(`Importing developer "${dev.name}"`);
+        context?.logger.info(`Importing developer "${dev.name}"`);
 
         const res = await developer(dev.name);
-        if (res === undefined) continue;
+        if (res === undefined) {
+          context?.logger.warn(`Failed to import developer "${dev}"`);
+          continue;
+        }
+        context?.logger.info(`Imported developer "${dev}"`);
         developers.push(res);
       }
     }

@@ -211,7 +219,7 @@ export class GiantBombProvider implements MetadataProvider {
 
     const images = [banner, ...imageURLs.map(createObject)];
 
-    context?.log(`Found all images. Total of ${images.length + 1}.`);
+    context?.logger.info(`Found all images. Total of ${images.length + 1}.`);
 
     const releaseDate = gameData.original_release_date
       ? DateTime.fromISO(gameData.original_release_date).toJSDate()

@@ -225,7 +233,7 @@ export class GiantBombProvider implements MetadataProvider {
 
     const reviews: GameMetadataRating[] = [];
     if (gameData.reviews) {
-      context?.log("Found reviews, importing...");
+      context?.logger.info("Found reviews, importing...");
       for (const { api_detail_url } of gameData.reviews) {
         const reviewId = api_detail_url.split("/").at(-2);
         if (!reviewId) continue;

@@ -260,7 +268,7 @@ export class GiantBombProvider implements MetadataProvider {
       images,
     };
 
-    context?.log("GiantBomb provider finished.");
+    context?.logger.info("GiantBomb provider finished.");
     context?.progress(100);
 
     return metadata;

@@ -268,7 +276,7 @@ export class GiantBombProvider implements MetadataProvider {
   async fetchCompany({
     query,
     createObject,
-  }: _FetchCompanyMetadataParams): Promise<CompanyMetadata> {
+  }: _FetchCompanyMetadataParams): Promise<CompanyMetadata | undefined> {
     const results = await this.request<Array<CompanySearchResult>>(
       "search",
       "",

@@ -279,7 +287,7 @@ export class GiantBombProvider implements MetadataProvider {
     const company =
       results.data.results.find((e) => e.name == query) ??
       results.data.results.at(0);
-    if (!company) throw new Error(`No results for "${query}"`);
+    if (!company) return undefined;
 
     const longDescription = company.description
       ? this.turndown.turndown(company.description)
@@ -14,6 +14,7 @@ import axios from "axios";
 import { DateTime } from "luxon";
 import * as jdenticon from "jdenticon";
 import type { TaskRunContext } from "../tasks";
+import { logger } from "~/server/internal/logging";
 
 type IGDBID = number;
 

@@ -163,7 +164,7 @@ export class IGDBProvider implements MetadataProvider {
   }
 
   private async authWithTwitch() {
-    console.log("IGDB authorizing with twitch");
+    logger.info("IGDB authorizing with twitch");
     const params = new URLSearchParams({
       client_id: this.clientId,
       client_secret: this.clientSecret,

@@ -186,7 +187,7 @@ export class IGDBProvider implements MetadataProvider {
       seconds: response.data.expires_in,
     });
 
-    console.log("IDGB done authorizing with twitch");
+    logger.info("IDGB done authorizing with twitch");
   }
 
   private async refreshCredentials() {

@@ -354,16 +355,16 @@ export class IGDBProvider implements MetadataProvider {
     const currentGame = (await this.request<IGDBGameFull>("games", body)).at(0);
     if (!currentGame) throw new Error("No game found on IGDB with that id");
 
-    context?.log("Using IDGB provider.");
+    context?.logger.info("Using IDGB provider.");
 
     let iconRaw;
     const cover = currentGame.cover;
 
     if (cover !== undefined) {
-      context?.log("Found cover URL, using...");
+      context?.logger.info("Found cover URL, using...");
       iconRaw = await this.getCoverURL(cover);
     } else {
-      context?.log("Missing cover URL, using fallback...");
+      context?.logger.info("Missing cover URL, using fallback...");
       iconRaw = jdenticon.toPng(id, 512);
     }
 

@@ -400,7 +401,7 @@ export class IGDBProvider implements MetadataProvider {
     >("companies", `where id = ${foundInvolved.company}; fields name;`);
 
     for (const company of findCompanyResponse) {
-      context?.log(
+      context?.logger.info(
         `Found involved company "${company.name}" as: ${foundInvolved.developer ? "developer, " : ""}${foundInvolved.publisher ? "publisher" : ""}`,
       );
 

@@ -408,13 +409,25 @@ export class IGDBProvider implements MetadataProvider {
       // CANNOT use else since a company can be both
       if (foundInvolved.developer) {
         const res = await developer(company.name);
-        if (res === undefined) continue;
+        if (res === undefined) {
+          context?.logger.warn(
+            `Failed to import developer "${company.name}"`,
+          );
+          continue;
+        }
+        context?.logger.info(`Imported developer "${company.name}"`);
         developers.push(res);
       }
 
       if (foundInvolved.publisher) {
         const res = await publisher(company.name);
-        if (res === undefined) continue;
+        if (res === undefined) {
+          context?.logger.warn(
+            `Failed to import publisher "${company.name}"`,
+          );
+          continue;
+        }
+        context?.logger.info(`Imported publisher "${company.name}"`);
         publishers.push(res);
       }
     }

@@ -461,7 +474,7 @@ export class IGDBProvider implements MetadataProvider {
       images,
     };
 
-    context?.log("IGDB provider finished.");
+    context?.logger.info("IGDB provider finished.");
     context?.progress(100);
 
     return metadata;

@@ -469,7 +482,7 @@ export class IGDBProvider implements MetadataProvider {
   async fetchCompany({
     query,
     createObject,
-  }: _FetchCompanyMetadataParams): Promise<CompanyMetadata> {
+  }: _FetchCompanyMetadataParams): Promise<CompanyMetadata | undefined> {
     const response = await this.request<IGDBCompany>(
       "companies",
       `where name = "${query}"; fields *; limit 1;`,

@@ -503,6 +516,6 @@ export class IGDBProvider implements MetadataProvider {
     return metadata;
   }
 
-    throw new Error(`igdb failed to find publisher/developer ${query}`);
+    return undefined;
   }
 }
@@ -16,6 +16,7 @@ import type { TaskRunContext } from "../tasks";
 import taskHandler, { wrapTaskContext } from "../tasks";
 import { randomUUID } from "crypto";
 import { fuzzy } from "fast-fuzzy";
+import { logger } from "~/server/internal/logging";
 
 export class MissingMetadataProviderConfig extends Error {
   private providerName: string;

@@ -89,7 +90,7 @@ export class MetadataHandler {
         );
         resolve(mappedResults);
       } catch (e) {
-        console.warn(e);
+        logger.warn(e);
        reject(e);
      }
    });

@@ -187,7 +188,7 @@ export class MetadataHandler {
       taskGroup: "import:game",
       acls: ["system:import:game:read"],
       async run(context) {
-        const { progress, log } = context;
+        const { progress, logger } = context;
 
         progress(0);
 

@@ -262,12 +263,12 @@ export class MetadataHandler {
         });
 
         progress(63);
-        log(`Successfully fetched all metadata.`);
-        log(`Importing objects...`);
+        logger.info(`Successfully fetched all metadata.`);
+        logger.info(`Importing objects...`);
 
         await pullObjects();
 
-        log(`Finished game import.`);
+        logger.info(`Finished game import.`);
       },
     });
 

@@ -301,7 +302,7 @@ export class MetadataHandler {
           );
         }
       } catch (e) {
-        console.warn(e);
+        logger.warn(e);
         dumpObjects();
         continue;
       }

@@ -337,9 +338,6 @@ export class MetadataHandler {
       return object;
     }
 
-    // throw new Error(
-    //   `No metadata provider found a ${databaseName} for "${query}"`,
-    // );
     return undefined;
   }
 }

@@ -44,7 +44,7 @@ export class ManualMetadataProvider implements MetadataProvider {
   }
   async fetchCompany(
     _params: _FetchCompanyMetadataParams,
-  ): Promise<CompanyMetadata> {
-    throw new Error("Method not implemented.");
+  ): Promise<CompanyMetadata | undefined> {
+    return undefined;
   }
 }
@@ -16,6 +16,7 @@ import { DateTime } from "luxon";
 import * as cheerio from "cheerio";
 import { type } from "arktype";
 import type { TaskRunContext } from "../tasks";
+import { logger } from "~/server/internal/logging";
 
 interface PCGamingWikiParseRawPage {
   parse: {

@@ -184,7 +185,7 @@ export class PCGamingWikiProvider implements MetadataProvider {
     let matches;
     if ((matches = opencriticRegex.exec(url.pathname)) !== null) {
       matches.forEach((match, _groupIndex) => {
-        // console.log(`Found match, group ${_groupIndex}: ${match}`);
+        // logger.log(`Found match, group ${_groupIndex}: ${match}`);
         id = match;
       });
     }

@@ -199,7 +200,7 @@ export class PCGamingWikiProvider implements MetadataProvider {
         return url.pathname.replace("/games/", "").replace(/\/$/, "");
       }
       default: {
-        console.warn("Pcgamingwiki, unknown host", url.hostname);
+        logger.warn("Pcgamingwiki, unknown host", url.hostname);
         return undefined;
       }
     }

@@ -223,7 +224,7 @@ export class PCGamingWikiProvider implements MetadataProvider {
 
     const href = reviewEle.attr("href");
     if (!href) {
-      console.log(
+      logger.info(
         `pcgamingwiki: failed to properly get review href for ${source}`,
       );
       return undefined;

@@ -232,7 +233,7 @@ export class PCGamingWikiProvider implements MetadataProvider {
       rating: reviewEle.text().trim(),
     });
     if (ratingObj instanceof type.errors) {
-      console.log(
+      logger.info(
         "pcgamingwiki: failed to properly get review rating",
         ratingObj.summary,
       );

@@ -374,7 +375,7 @@ export class PCGamingWikiProvider implements MetadataProvider {
     { id, name, publisher, developer, createObject }: _FetchGameMetadataParams,
     context?: TaskRunContext,
   ): Promise<GameMetadata> {
-    context?.log("Using PCGamingWiki provider");
+    context?.logger.info("Using PCGamingWiki provider");
     context?.progress(0);
 
     const searchParams = new URLSearchParams({

@@ -397,13 +398,18 @@ export class PCGamingWikiProvider implements MetadataProvider {
 
     const publishers: Company[] = [];
     if (game.Publishers !== null) {
-      context?.log("Found publishers, importing...");
+      context?.logger.info("Found publishers, importing...");
       const pubListClean = this.parseWikiStringArray(game.Publishers);
       for (const pub of pubListClean) {
-        context?.log(`Importing "${pub}"...`);
+        context?.logger.info(`Importing publisher "${pub}"...`);
 
         const res = await publisher(pub);
-        if (res === undefined) continue;
+        if (res === undefined) {
+          context?.logger.warn(`Failed to import publisher "${pub}"`);
+          continue;
+        }
+        context?.logger.info(`Imported publisher "${pub}"`);
         // add to publishers
         publishers.push(res);
       }
     }

@@ -412,12 +418,16 @@ export class PCGamingWikiProvider implements MetadataProvider {
 
     const developers: Company[] = [];
     if (game.Developers !== null) {
-      context?.log("Found developers, importing...");
+      context?.logger.info("Found developers, importing...");
       const devListClean = this.parseWikiStringArray(game.Developers);
       for (const dev of devListClean) {
-        context?.log(`Importing "${dev}"...`);
+        context?.logger.info(`Importing developer "${dev}"...`);
         const res = await developer(dev);
-        if (res === undefined) continue;
+        if (res === undefined) {
+          context?.logger.warn(`Failed to import developer "${dev}"`);
+          continue;
+        }
+        context?.logger.info(`Imported developer "${dev}"`);
         developers.push(res);
       }
     }

@@ -453,7 +463,7 @@ export class PCGamingWikiProvider implements MetadataProvider {
       images: [icon],
     };
 
-    context?.log("PCGamingWiki provider finished.");
+    context?.logger.info("PCGamingWiki provider finished.");
     context?.progress(100);
 
     return metadata;

@@ -462,7 +472,7 @@ export class PCGamingWikiProvider implements MetadataProvider {
   async fetchCompany({
     query,
     createObject,
-  }: _FetchCompanyMetadataParams): Promise<CompanyMetadata> {
+  }: _FetchCompanyMetadataParams): Promise<CompanyMetadata | undefined> {
     const searchParams = new URLSearchParams({
       action: "cargoquery",
       tables: "Company",

@@ -496,6 +506,6 @@ export class PCGamingWikiProvider implements MetadataProvider {
     return metadata;
   }
 
-    throw new Error(`pcgamingwiki failed to find publisher/developer ${query}`);
+    return undefined;
   }
 }
@@ -9,6 +9,8 @@ import prisma from "../db/database";
 import cacheHandler from "../cache";
 import { systemConfig } from "../config/sys-conf";
 import { type } from "arktype";
+import { logger } from "~/server/internal/logging";
+import type pino from "pino";
 
 export class FsObjectBackend extends ObjectBackend {
   private baseObjectPath: string;

@@ -121,7 +123,7 @@ export class FsObjectBackend extends ObjectBackend {
     const metadataRaw = JSON.parse(fs.readFileSync(metadataPath, "utf-8"));
     const metadata = objectMetadata(metadataRaw);
     if (metadata instanceof type.errors) {
-      console.error("FsObjectBackend#fetchMetadata", metadata.summary);
+      logger.error("FsObjectBackend#fetchMetadata", metadata.summary);
       return undefined;
     }
     await this.metadataCache.set(id, metadata);

@@ -175,23 +177,27 @@ export class FsObjectBackend extends ObjectBackend {
     return fs.readdirSync(this.baseObjectPath);
   }
 
-  async cleanupMetadata() {
+  async cleanupMetadata(taskLogger: pino.Logger) {
+    const cleanupLogger = taskLogger ?? logger;
+
     const metadataFiles = fs.readdirSync(this.baseMetadataPath);
     const objects = await this.listAll();
 
     const extraFiles = metadataFiles.filter(
       (file) => !objects.includes(file.replace(/\.json$/, "")),
     );
-    console.log(
+    cleanupLogger.info(
       `[FsObjectBackend#cleanupMetadata]: Found ${extraFiles.length} metadata files without corresponding objects.`,
     );
     for (const file of extraFiles) {
       const filePath = path.join(this.baseMetadataPath, file);
       try {
         fs.rmSync(filePath);
-        console.log(`[FsObjectBackend#cleanupMetadata]: Removed ${file}`);
+        cleanupLogger.info(
+          `[FsObjectBackend#cleanupMetadata]: Removed ${file}`,
+        );
       } catch (error) {
-        console.error(
+        cleanupLogger.error(
           `[FsObjectBackend#cleanupMetadata]: Failed to remove ${file}`,
           error,
         );
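Worth noting: `cleanupMetadata` above declares `taskLogger: pino.Logger` as required, so the `taskLogger ?? logger` fallback can never trigger at the type level. A hypothetical variant (not what this commit ships) where the fallback is reachable would make the parameter optional:

```ts
import type pino from "pino";
import { logger } from "~/server/internal/logging";

// Hypothetical signature: an optional parameter makes `?? logger` reachable
// when a caller omits the task logger.
async function cleanupMetadata(taskLogger?: pino.Logger): Promise<void> {
  const cleanupLogger = taskLogger ?? logger;
  cleanupLogger.info("[FsObjectBackend#cleanupMetadata]: starting sweep");
}
```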
@@ -16,6 +16,7 @@
 
 import { type } from "arktype";
 import { parse as getMimeTypeBuffer } from "file-type-mime";
+import type pino from "pino";
 import type { Writable } from "stream";
 import { Readable } from "stream";
 import { getMimeType as getMimeTypeStream } from "stream-mime-type";

@@ -71,7 +72,7 @@ export abstract class ObjectBackend {
   ): Promise<boolean>;
   abstract fetchHash(id: ObjectReference): Promise<string | undefined>;
   abstract listAll(): Promise<string[]>;
-  abstract cleanupMetadata(): Promise<void>;
+  abstract cleanupMetadata(taskLogger: pino.Logger): Promise<void>;
 }
 
 export class ObjectHandler {

@@ -264,7 +265,7 @@ export class ObjectHandler {
    * This is useful for cleaning up metadata files that are left behinds
    * @returns
    */
-  async cleanupMetadata() {
-    return await this.backend.cleanupMetadata();
+  async cleanupMetadata(taskLogger: pino.Logger) {
+    return await this.backend.cleanupMetadata(taskLogger);
   }
 }

@@ -43,9 +43,9 @@ export class ObjectTransactionalHandler {
 
     for (const [id, data] of transaction) {
       if (typeof data === "string") {
-        context?.log(`Importing object from "${data}"`);
+        context?.logger.info(`Importing object from "${data}"`);
       } else {
-        context?.log(`Importing raw object...`);
+        context?.logger.info(`Importing raw object...`);
       }
       await objectHandler.createFromSource(
         id,
@@ -10,6 +10,9 @@ import cleanupObjects from "./registry/objects";
 import { taskGroups, type TaskGroup } from "./group";
 import prisma from "../db/database";
 import { type } from "arktype";
+import pino from "pino";
+import { logger } from "~/server/internal/logging";
+import { Writable } from "node:stream";
 
 // a task that has been run
 type FinishedTask = {

@@ -80,7 +83,7 @@ class TaskHandler {
     // if a task is already running, we don't want to start another
     if (existingTask.taskGroup === task.taskGroup) {
       // TODO: handle this more gracefully, maybe with a queue? should be configurable
-      console.warn(
+      logger.warn(
         `Task group ${task.taskGroup} does not allow concurrent tasks. Task ${task.id} will not be started.`,
       );
       throw new Error(

@@ -126,16 +129,82 @@ class TaskHandler {
       }, 100);
     });
 
-    const log = (entry: string) => {
-      const taskEntry = this.taskPool.get(task.id);
-      if (!taskEntry) return;
-      taskEntry.log.push(msgWithTimestamp(entry));
-      updateAllClients();
-    };
+    const taskPool = this.taskPool;
+
+    // Create a pino transport that replicates the old log function behavior
+    // const taskLogger = pino({
+    //   hooks: {
+    //     logMethod(args, method) {
+    //       // Combine all arguments into a single string message
+    //       const message = args.map(String).join(" ");
+    //       const now = new Date();
+
+    //       const pad = (n: number, width = 2) =>
+    //         n.toString().padStart(width, "0");
+
+    //       const year = now.getUTCFullYear();
+    //       const month = pad(now.getUTCMonth() + 1);
+    //       const day = pad(now.getUTCDate());
+
+    //       const hours = pad(now.getUTCHours());
+    //       const minutes = pad(now.getUTCMinutes());
+    //       const seconds = pad(now.getUTCSeconds());
+    //       const milliseconds = pad(now.getUTCMilliseconds(), 3);
+
+    //       const logObj = {
+    //         timestamp: `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${milliseconds} UTC`,
+    //         message,
+    //       };
+
+    //       // Push the formatted log string to the task's log array
+    //       const taskEntry = taskPool.get(task.id);
+    //       if (taskEntry) {
+    //         taskEntry.log.push(JSON.stringify(logObj));
+    //         updateAllClients();
+    //       }
+
+    //       // Optionally, still call the original method if you want logs elsewhere
+    //       method.apply(this, args);
+    //     },
+    //   },
+    // });
+
+    // Custom writable stream to capture logs
+    const logStream = new Writable({
+      objectMode: true,
+      write(chunk, encoding, callback) {
+        try {
+          // chunk is a stringified JSON log line
+          const logObj = JSON.parse(chunk.toString());
+          const taskEntry = taskPool.get(task.id);
+          if (taskEntry) {
+            taskEntry.log.push(JSON.stringify(logObj));
+            updateAllClients();
+          }
+        } catch (e) {
+          // fallback: ignore or log error
+          logger.error("Failed to parse log chunk", {
+            error: e,
+            chunk: chunk,
+          });
+        }
+        callback();
+      },
+    });
+
+    // Use pino with the custom stream
+    const taskLogger = pino(
+      {
+        // You can configure timestamp, level, etc. here
+        timestamp: pino.stdTimeFunctions.isoTime,
+        base: null, // Remove pid/hostname if not needed
+      },
+      logStream,
+    );
 
     const progress = (progress: number) => {
       if (progress < 0 || progress > 100) {
-        console.error("Progress must be between 0 and 100", { progress });
+        logger.error("Progress must be between 0 and 100", { progress });
         return;
       }
       const taskEntry = this.taskPool.get(task.id);
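The stream above works because pino serializes every record to a single JSON line before handing it to its destination; the `Writable` parses that line and appends it to the task's log array. A standalone sketch of the same capture pattern; the exact field order and `time` value will vary:

```ts
import pino from "pino";
import { Writable } from "node:stream";

const lines: string[] = [];
const sink = new Writable({
  write(chunk, _encoding, callback) {
    // Each chunk is one serialized JSON log record.
    lines.push(chunk.toString().trim());
    callback();
  },
});

const taskLogger = pino(
  { timestamp: pino.stdTimeFunctions.isoTime, base: null },
  sink,
);

taskLogger.info("Created manifest successfully!");
// lines[0] is roughly:
// {"level":30,"time":"2025-01-01T00:00:00.000Z","msg":"Created manifest successfully!"}
// (level 30 is pino's numeric "info"; base: null drops pid/hostname)
```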
@@ -165,7 +234,7 @@ class TaskHandler {
     if (!taskEntry) throw new Error("No task entry");
 
     try {
-      await task.run({ progress, log });
+      await task.run({ progress, logger: taskLogger });
       taskEntry.success = true;
     } catch (error: unknown) {
       taskEntry.success = false;

@@ -226,7 +295,7 @@ class TaskHandler {
 
     const allowed = await aclManager.hasACL(request, task.acls);
     if (!allowed) {
-      console.warn("user does not have necessary ACLs");
+      // logger.warn("user does not have necessary ACLs");
       peer.send(
         `error/${taskId}/Unknown task/Drop couldn't find the task you're looking for.`,
       );

@@ -304,7 +373,7 @@ class TaskHandler {
   runTaskGroupByName(name: TaskGroup) {
     const task = this.taskCreators.get(name);
     if (!task) {
-      console.warn(`No task found for group ${name}`);
+      logger.warn(`No task found for group ${name}`);
       return;
     }
     this.create(task());

@@ -365,17 +434,21 @@ class TaskHandler {
 
 export type TaskRunContext = {
   progress: (progress: number) => void;
-  log: (message: string) => void;
+  logger: typeof logger;
 };
 
 export function wrapTaskContext(
   context: TaskRunContext,
   options: { min: number; max: number; prefix: string },
 ): TaskRunContext {
+  const child = context.logger.child({
+    prefix: options.prefix,
+  });
+
   return {
     progress(progress) {
       if (progress > 100 || progress < 0) {
-        console.warn("[wrapTaskContext] progress must be between 0 and 100");
+        logger.warn("[wrapTaskContext] progress must be between 0 and 100");
       }
 
       // I was too tired to figure this out

@@ -385,9 +458,7 @@ export function wrapTaskContext(
       const adjustedProgress = (progress * newRange) / oldRange + options.min;
       return context.progress(adjustedProgress);
     },
-    log(message) {
-      return context.log(options.prefix + message);
-    },
+    logger: child,
   };
 }
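Note the semantic shift in `wrapTaskContext` above: the old `log` wrapper prepended `options.prefix` to the message text, while the pino child logger carries the prefix as a structured binding and leaves `msg` untouched. A small sketch of the difference, with illustrative values:

```ts
import pino from "pino";

const root = pino();
const child = root.child({ prefix: "[import] " });

child.info("fetching metadata");
// emits roughly: {"level":30,...,"prefix":"[import] ","msg":"fetching metadata"}
// whereas the old log("fetching metadata") produced the single string:
// "[import] fetching metadata"
```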
@@ -431,31 +502,31 @@ export const TaskLog = type({
   message: "string",
 });
 
-/**
- * Create a log message with a timestamp in the format YYYY-MM-DD HH:mm:ss.SSS UTC
- * @param message
- * @returns
- */
-function msgWithTimestamp(message: string): string {
-  const now = new Date();
+// /**
+//  * Create a log message with a timestamp in the format YYYY-MM-DD HH:mm:ss.SSS UTC
+//  * @param message
+//  * @returns
+//  */
+// function msgWithTimestamp(message: string): string {
+//   const now = new Date();
 
-  const pad = (n: number, width = 2) => n.toString().padStart(width, "0");
+//   const pad = (n: number, width = 2) => n.toString().padStart(width, "0");
 
-  const year = now.getUTCFullYear();
-  const month = pad(now.getUTCMonth() + 1);
-  const day = pad(now.getUTCDate());
+//   const year = now.getUTCFullYear();
+//   const month = pad(now.getUTCMonth() + 1);
+//   const day = pad(now.getUTCDate());
 
-  const hours = pad(now.getUTCHours());
-  const minutes = pad(now.getUTCMinutes());
-  const seconds = pad(now.getUTCSeconds());
-  const milliseconds = pad(now.getUTCMilliseconds(), 3);
+//   const hours = pad(now.getUTCHours());
+//   const minutes = pad(now.getUTCMinutes());
+//   const seconds = pad(now.getUTCSeconds());
+//   const milliseconds = pad(now.getUTCMilliseconds(), 3);
 
-  const log: typeof TaskLog.infer = {
-    timestamp: `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${milliseconds} UTC`,
-    message,
-  };
-  return JSON.stringify(log);
-}
+//   const log: typeof TaskLog.infer = {
+//     timestamp: `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${milliseconds} UTC`,
+//     message,
+//   };
+//   return JSON.stringify(log);
+// }
 
 export function defineDropTask(buildTask: BuildTask): DropTask {
   // TODO: only let one task with the same taskGroup run at the same time if specified
@@ -6,8 +6,8 @@ export default defineDropTask({
   name: "Cleanup Invitations",
   acls: ["system:maintenance:read"],
   taskGroup: "cleanup:invitations",
-  async run({ log }) {
-    log("Cleaning invitations");
+  async run({ progress, logger }) {
+    logger.info("Cleaning invitations");
 
     const now = new Date();
 

@@ -19,6 +19,7 @@ export default defineDropTask({
       },
     });
 
-    log("Done");
+    logger.info("Done");
+    progress(100);
   },
 });
@@ -15,36 +15,38 @@ export default defineDropTask({
   name: "Cleanup Objects",
   acls: ["system:maintenance:read"],
   taskGroup: "cleanup:objects",
-  async run({ progress, log }) {
-    log("Cleaning unreferenced objects");
+  async run({ progress, logger }) {
+    logger.info("Cleaning unreferenced objects");
 
     // get all objects
     const objects = await objectHandler.listAll();
-    log(`searching for ${objects.length} objects`);
+    logger.info(`searching for ${objects.length} objects`);
     progress(30);
 
     // find unreferenced objects
     const refMap = buildRefMap();
-    log("Building reference map");
-    log(`Found ${Object.keys(refMap).length} models with reference fields`);
-    log("Searching for unreferenced objects");
+    logger.info("Building reference map");
+    logger.info(
+      `Found ${Object.keys(refMap).length} models with reference fields`,
+    );
+    logger.info("Searching for unreferenced objects");
     const unrefedObjects = await findUnreferencedStrings(objects, refMap);
-    log(`found ${unrefedObjects.length} Unreferenced objects`);
-    // console.log(unrefedObjects);
+    logger.info(`found ${unrefedObjects.length} Unreferenced objects`);
+    // logger.info(unrefedObjects);
     progress(60);
 
     // remove objects
     const deletePromises: Promise<boolean>[] = [];
     for (const obj of unrefedObjects) {
-      log(`Deleting object ${obj}`);
+      logger.info(`Deleting object ${obj}`);
       deletePromises.push(objectHandler.deleteAsSystem(obj));
     }
     await Promise.all(deletePromises);
 
     // Remove any possible leftover metadata
-    objectHandler.cleanupMetadata();
+    await objectHandler.cleanupMetadata(logger);
 
-    log("Done");
+    logger.info("Done");
     progress(100);
   },
 });
@@ -6,9 +6,10 @@ export default defineDropTask({
   name: "Cleanup Sessions",
   acls: ["system:maintenance:read"],
   taskGroup: "cleanup:sessions",
-  async run({ log }) {
-    log("Cleaning up sessions");
+  async run({ progress, logger }) {
+    logger.info("Cleaning up sessions");
     await sessionHandler.cleanupSessions();
-    log("Done");
+    logger.info("Done");
+    progress(100);
   },
 });
@@ -21,38 +21,38 @@ export default defineDropTask({
   name: "Check for Update",
   acls: ["system:maintenance:read"],
   taskGroup: "check:update",
-  async run({ log }) {
+  async run({ progress, logger }) {
     // TODO: maybe implement some sort of rate limit thing to prevent this from calling github api a bunch in the event of crashloop or whatever?
     // probably will require custom task scheduler for object cleanup anyway, so something to thing about
 
     if (!systemConfig.shouldCheckForUpdates()) {
-      log("Update check is disabled by configuration");
+      logger.info("Update check is disabled by configuration");
+      progress(100);
       return;
     }
 
-    log("Checking for update");
+    logger.info("Checking for update");
 
     const currVerStr = systemConfig.getDropVersion();
     const currVer = semver.coerce(currVerStr);
     if (currVer === null) {
       const msg = "Drop provided a invalid semver tag";
-      log(msg);
+      logger.info(msg);
       throw new Error(msg);
     }
+    progress(30);
 
     const response = await fetch(
       "https://api.github.com/repos/Drop-OSS/drop/releases/latest",
     );
+    progress(50);
 
     // if response failed somehow
     if (!response.ok) {
-      log(
-        "Failed to check for update " +
-          JSON.stringify({
-            status: response.status,
-            body: response.body,
-          }),
-      );
+      logger.info("Failed to check for update ", {
+        status: response.status,
+        body: response.body,
+      });
 
       throw new Error(
         `Failed to check for update: ${response.status} ${response.body}`,

@@ -63,8 +63,8 @@ export default defineDropTask({
     const resJson = await response.json();
     const body = latestRelease(resJson);
     if (body instanceof type.errors) {
-      log(body.summary);
-      log("GitHub Api response" + JSON.stringify(resJson));
+      logger.info(body.summary);
+      logger.info("GitHub Api response" + JSON.stringify(resJson));
       throw new Error(
         `GitHub Api response did not match expected schema: ${body.summary}`,
       );

@@ -74,14 +74,15 @@ export default defineDropTask({
     const latestVer = semver.coerce(body.tag_name);
     if (latestVer === null) {
       const msg = "Github Api returned invalid semver tag";
-      log(msg);
+      logger.info(msg);
       throw new Error(msg);
     }
+    progress(70);
 
     // TODO: handle prerelease identifiers https://github.com/npm/node-semver#prerelease-identifiers
     // check if is newer version
     if (semver.gt(latestVer, currVer)) {
-      log("Update available");
+      logger.info("Update available");
       notificationSystem.systemPush({
         nonce: `drop-update-available-${currVer}-to-${latestVer}`,
         title: `Update available to v${latestVer}`,

@@ -90,9 +91,10 @@ export default defineDropTask({
       acls: ["system:notifications:read"],
       });
     } else {
-      log("no update available");
+      logger.info("no update available");
     }
 
-    log("Done");
+    logger.info("Done");
+    progress(100);
   },
 });
@@ -5,6 +5,7 @@ import { GiantBombProvider } from "../internal/metadata/giantbomb";
 import { IGDBProvider } from "../internal/metadata/igdb";
 import { ManualMetadataProvider } from "../internal/metadata/manual";
 import { PCGamingWikiProvider } from "../internal/metadata/pcgamingwiki";
+import { logger } from "~/server/internal/logging";
 
 export default defineNitroPlugin(async (_nitro) => {
   const metadataProviders = [

@@ -21,9 +22,9 @@ export default defineNitroPlugin(async (_nitro) => {
       const id = prov.source();
       providers.set(id, prov);
 
-      console.log(`enabled metadata provider: ${prov.name()}`);
+      logger.info(`enabled metadata provider: ${prov.name()}`);
     } catch (e) {
-      console.warn(`skipping metadata provider setup: ${e}`);
+      logger.warn(`skipping metadata provider setup: ${e}`);
     }
   }
 

@@ -35,7 +36,7 @@ export default defineNitroPlugin(async (_nitro) => {
     const priority = max * 2 - index; // Offset by the length --- (max - index) + max
     const provider = providers.get(providerId);
     if (!provider) {
-      console.warn(`failed to add existing metadata provider: ${providerId}`);
+      logger.warn(`failed to add existing metadata provider: ${providerId}`);
       continue;
     }
     metadataHandler.addProvider(provider, priority);
@@ -7,6 +7,7 @@ import { FilesystemProvider } from "../internal/library/providers/filesystem";
 import libraryManager from "../internal/library";
 import path from "path";
 import { FlatFilesystemProvider } from "../internal/library/providers/flat";
+import { logger } from "~/server/internal/logging";
 
 export const libraryConstructors: {
   [key in LibraryBackend]: (

@@ -67,14 +68,14 @@ export default defineNitroPlugin(async () => {
       libraryManager.addLibrary(provider);
       successes++;
     } catch (e) {
-      console.warn(
+      logger.warn(
         `Failed to create library (${library.id}) of type ${library.backend}:\n ${e}`,
       );
     }
   }
 
   if (successes == 0) {
-    console.warn(
+    logger.warn(
       "No library was successfully initialised. Please check for errors. If you have just set up an instance, this is normal.",
     );
   }