From bc97d9106bce182b912dcfdf220bd56f841f604c Mon Sep 17 00:00:00 2001 From: Avior Date: Wed, 11 Sep 2024 14:38:58 +0200 Subject: [PATCH] feat: Filemagedon Signed-off-by: Avior --- console.ts | 9 + package.json | 3 +- src/commands/Migrations/current.ts | 21 + src/commands/Migrations/migrate.ts | 19 + src/commands/index.ts | 78 ++++ src/components/global/Picture.astro | 11 +- src/libs/AstroUtils.ts | 5 - src/libs/Emails/Email.ts | 249 ++++++++++ src/libs/Emails/EmailServer.ts | 234 ++++++++++ src/libs/Emails/SMTP.ts | 74 +++ src/libs/FilesFormats/CSV.ts | 114 +++++ src/libs/FilesFormats/XLSX.ts | 93 ++++ src/libs/FilesFormats/XML.ts | 262 +++++++++++ src/libs/Form.ts | 159 +++++++ src/libs/S3.ts | 97 ++++ src/libs/Schema/Items/DzeioLiteral.ts | 23 + src/libs/Schema/Items/SchemaArray.ts | 93 ++++ src/libs/Schema/Items/SchemaBoolean.ts | 8 + src/libs/Schema/Items/SchemaDate.ts | 48 ++ src/libs/Schema/Items/SchemaFile.ts | 21 + src/libs/Schema/Items/SchemaNullable.ts | 65 +++ src/libs/Schema/Items/SchemaNumber.ts | 89 ++++ src/libs/Schema/Items/SchemaRecord.ts | 88 ++++ src/libs/Schema/Items/SchemaString.ts | 76 +++ src/libs/Schema/README.md | 25 + src/libs/Schema/SchemaItem.ts | 169 +++++++ src/libs/Schema/index.ts | 217 +++++++++ src/libs/Schema/utils.ts | 3 + src/models/Adapters/AdapterUtils.ts | 234 ++++++++++ src/models/Adapters/CSVAdapter.ts | 54 +++ src/models/Adapters/CassandraAdapter.ts.old | 433 ++++++++++++++++++ src/models/{Dao.ts => Adapters/DaoAdapter.ts} | 73 ++- src/models/Adapters/FSAdapter.ts | 221 +++++++++ src/models/Adapters/LDAPAdapter.ts | 221 +++++++++ src/models/Adapters/MultiAdapter.ts | 69 +++ src/models/Adapters/PostgresAdapter.ts | 324 +++++++++++++ src/models/Clients/CassandraClient.ts | 130 ++++++ src/models/Clients/Client.ts | 137 ++++++ src/models/Clients/PostgresClient.ts | 71 +++ src/models/DaoFactory.ts | 47 +- src/models/Migrations/Example.ts | 35 ++ src/models/Migrations/Migration.d.ts | 10 + src/models/Query.ts | 149 ++++++ src/models/config.ts | 48 ++ vitest.config.ts | 3 +- 45 files changed, 4548 insertions(+), 64 deletions(-) create mode 100644 console.ts create mode 100644 src/commands/Migrations/current.ts create mode 100644 src/commands/Migrations/migrate.ts create mode 100644 src/commands/index.ts delete mode 100644 src/libs/AstroUtils.ts create mode 100644 src/libs/Emails/Email.ts create mode 100644 src/libs/Emails/EmailServer.ts create mode 100644 src/libs/Emails/SMTP.ts create mode 100644 src/libs/FilesFormats/CSV.ts create mode 100644 src/libs/FilesFormats/XLSX.ts create mode 100644 src/libs/FilesFormats/XML.ts create mode 100644 src/libs/Form.ts create mode 100644 src/libs/S3.ts create mode 100644 src/libs/Schema/Items/DzeioLiteral.ts create mode 100644 src/libs/Schema/Items/SchemaArray.ts create mode 100644 src/libs/Schema/Items/SchemaBoolean.ts create mode 100644 src/libs/Schema/Items/SchemaDate.ts create mode 100644 src/libs/Schema/Items/SchemaFile.ts create mode 100644 src/libs/Schema/Items/SchemaNullable.ts create mode 100644 src/libs/Schema/Items/SchemaNumber.ts create mode 100644 src/libs/Schema/Items/SchemaRecord.ts create mode 100644 src/libs/Schema/Items/SchemaString.ts create mode 100644 src/libs/Schema/README.md create mode 100644 src/libs/Schema/SchemaItem.ts create mode 100644 src/libs/Schema/index.ts create mode 100644 src/libs/Schema/utils.ts create mode 100644 src/models/Adapters/AdapterUtils.ts create mode 100644 src/models/Adapters/CSVAdapter.ts create mode 100644 src/models/Adapters/CassandraAdapter.ts.old rename 
src/models/{Dao.ts => Adapters/DaoAdapter.ts} (54%) create mode 100644 src/models/Adapters/FSAdapter.ts create mode 100644 src/models/Adapters/LDAPAdapter.ts create mode 100644 src/models/Adapters/MultiAdapter.ts create mode 100644 src/models/Adapters/PostgresAdapter.ts create mode 100644 src/models/Clients/CassandraClient.ts create mode 100644 src/models/Clients/Client.ts create mode 100644 src/models/Clients/PostgresClient.ts create mode 100644 src/models/Migrations/Example.ts create mode 100644 src/models/Migrations/Migration.d.ts create mode 100644 src/models/Query.ts create mode 100644 src/models/config.ts diff --git a/console.ts b/console.ts new file mode 100644 index 0000000..71c0599 --- /dev/null +++ b/console.ts @@ -0,0 +1,9 @@ +/** + * This file is used as a shortcute to `src/commands/index.ts` + * + * It allows you to run commands that will change things in the codebase + * + * to start, run `bun console.ts` + */ + +import 'commands/index' diff --git a/package.json b/package.json index 4719588..f8e015e 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,8 @@ "test:unit": "vitest --coverage --run", "test:e2e": "playwright test", "install:test": "playwright install --with-deps", - "lint": "biome check ." + "lint": "biome check .", + "command": "bun run src/commands/index.ts" }, "dependencies": { "@astrojs/node": "^8.0.0", diff --git a/src/commands/Migrations/current.ts b/src/commands/Migrations/current.ts new file mode 100644 index 0000000..aa96e56 --- /dev/null +++ b/src/commands/Migrations/current.ts @@ -0,0 +1,21 @@ +import type { Command } from 'commands' +import DaoFactory from 'models/DaoFactory' + +const command: Command = { + name: 'migrations:current', + description: 'Get the current version of the database', + async run() { + const client = await DaoFactory.client() + await client.connect() + const ver = await client.getVersion() + if (ver < 0) { + console.log('no database :(') + } else { + console.log(`Current database version: ${new Date(ver)}`) + } + return { + code: 0 + } + }, +} +export default command diff --git a/src/commands/Migrations/migrate.ts b/src/commands/Migrations/migrate.ts new file mode 100644 index 0000000..a643093 --- /dev/null +++ b/src/commands/Migrations/migrate.ts @@ -0,0 +1,19 @@ +import type { Command } from 'commands' +import DaoFactory from 'models/DaoFactory' + +const command: Command = { + name: 'migrations:migrate', + description: 'Migrate the database to the latest version', + async run() { + const client = await DaoFactory.client() + console.log('connecting...') + await client.connect() + console.log('migrating...') + await client.migrateToLatest() + console.log('migrations should be ok :D') + return { + code: 0 + } + }, +} +export default command diff --git a/src/commands/index.ts b/src/commands/index.ts new file mode 100644 index 0000000..67bf433 --- /dev/null +++ b/src/commands/index.ts @@ -0,0 +1,78 @@ +import fs from "node:fs/promises" + +interface Context { + args: Array + commands: Array + command: string +} + +interface Response { + code: number +} + +export interface Command { + run(input: Context): Promise | Response + name: string + description?: string +} + +const builtinCommands: Array = [ + { + name: "help", + run({ commands }) { + console.table( + commands.map((command) => ({ + name: command.name, + description: command.description ?? 
"no description", + })), + ); + return { + code: 0, + }; + }, + }, +]; + +async function createContext(): Promise { + const ctx = { + args: process.argv.slice(3), + commands: await getCommands(), + command: process.argv[2] ?? "help", + }; + return ctx +} + +async function listfiles(folder: string): Promise> { + const files = await fs.readdir(folder) + const res: Array = []; + for (const file of files) { + const path = `${folder}/${file}` + if ((await fs.stat(path)).isDirectory()) { + res.push(...(await listfiles(path))) + } else { + res.push(path) + } + } + return res +} + +async function getCommands(): Promise> { + const files = (await listfiles(__dirname)) + .filter((it) => it !== `${__dirname}/index.ts`) + .map((it) => import(it).then((it) => it.default)) + return builtinCommands.concat(await Promise.all(files)) +} + +;(async () => { + const context = await createContext() + for (const command of context.commands) { + if (command.name === context.command) { + const res = await command.run(context) + process.exit(res.code) + } + } + + console.log( + `command "${context.command}" not found, please use "help" to get the list of commands`, + ) +})() diff --git a/src/components/global/Picture.astro b/src/components/global/Picture.astro index fd7e2e5..7ec1589 100644 --- a/src/components/global/Picture.astro +++ b/src/components/global/Picture.astro @@ -1,7 +1,6 @@ --- import { getImage } from 'astro:assets' import { objectOmit } from '@dzeio/object-util' -import AstroUtils from '../../libs/AstroUtils' const formats = ['avif', 'webp'] @@ -62,12 +61,10 @@ async function resolvePicture(image: ImageMetadata | string): Promise(async () => { - return { - light: await resolvePicture(Astro.props.src), - dark: Astro.props.srcDark ? await resolvePicture(Astro.props.srcDark) : undefined - } -}) +const res = { + light: await resolvePicture(Astro.props.src), + dark: Astro.props.srcDark ? 
await resolvePicture(Astro.props.srcDark) : undefined +} const props = objectOmit(Astro.props, 'src', 'srcDark', 'class') --- diff --git a/src/libs/AstroUtils.ts b/src/libs/AstroUtils.ts deleted file mode 100644 index 240fbe3..0000000 --- a/src/libs/AstroUtils.ts +++ /dev/null @@ -1,5 +0,0 @@ -export default class AstroUtils { - public static async wrap(fn: () => T | Promise) { - return await fn() - } -} diff --git a/src/libs/Emails/Email.ts b/src/libs/Emails/Email.ts new file mode 100644 index 0000000..a740455 --- /dev/null +++ b/src/libs/Emails/Email.ts @@ -0,0 +1,249 @@ +import type { ImapMessageAttributes } from 'imap' +import Imap from 'imap' + +interface EmailAddress { + name?: string + address: string +} + +interface Headers { + to: Array + from: EmailAddress + date: Date + subject: string +} + +export default class Email { + public constructor( + private imap: Imap, + private mailbox: Imap.Box, + public readonly id: number + ) {} + + public async getAttributes(): Promise> { + return new Promise>((res) => { + this.imap.fetch(this.id, { + bodies: [], + struct: true + }).on('message', (msg) => msg.on('attributes', (attrs) => { + res(attrs.struct as Array) + })) + }) + } + + + public async getFlags(): Promise> { + + return new Promise>((res) => { + this.imap.fetch(this.id, { + bodies: [], + struct: false + }).on('message', (msg) => msg.on('attributes', (attrs) => { + res(attrs.flags) + })) + }) + } + + public async getText() { + const part = await this.getTextPart() + if (!part) { + return '' + } + return this.fetchPart(part.partID).then(this.decodeEmail) + } + + public async isSeen() { + return this.getFlags().then((arr) => arr.includes('\\Seen')) + } + + public async setIsSeen(isSeen: boolean): Promise { + console.log(this.mailbox.flags, this.mailbox.permFlags, '\\\\Seen') + console.log('isSeen', isSeen, await this.isSeen()) + if (await this.isSeen() === isSeen) { + return isSeen + } + if (isSeen) { + return new Promise((res, rej) => { + this.imap.addFlags(this.id, 'SEEN', (err) => { + if (err) { + rej(err) + return + } + res(isSeen) + }) + }) + } + + return new Promise((res, rej) => { + this.imap.addKeywords(this.id, 'SEEN', (err) => { + if (err) { + rej(err) + return + } + res(isSeen) + }) + }) + } + + public async getHeaders(): Promise { + const req = this.imap.fetch(this.id, { + bodies: ['HEADER'], + struct: false + }) + return new Promise((res) => { + req.on('message', (msg) => msg.on('body', (stream) => { + let bfr = '' + stream + .on('data', (chunk) => bfr += chunk.toString('utf8')) + .once('end', () => { + const tmp = Imap.parseHeader(bfr) + function t(email: string): EmailAddress { + if (email.includes('<')) { + const [name, addr] = email.split('<', 2) + if (!addr || !name) { + return { + address: email + } + } + return { + name, + address: addr.slice(0, addr.indexOf('>')) + } + } + return { + address: email + } + } + // console.log(tmp) + res({ + subject: tmp.subject?.[0] as string, + to: tmp.to?.map(t) ?? [], + from: t(tmp.from?.[0] ?? ''), + date: new Date(tmp.date?.[0] ?? 
'') + }) + }) + })) + }) + } + + /** + * hold the attachment ID + */ + public async getAttachments(): Promise> { + return this.getAttachmentParts() + } + + public async downloadAttachment(attachment: any): Promise<{filename: string, data: Buffer}> { + const req = this.imap.fetch(this.id, { + bodies: [attachment.partID], + struct: true + }) + return new Promise((res) => { + req.on('message', (msg) => { + const filename = attachment.params.name + const encoding = attachment.encoding + console.log(filename, encoding, msg) + let buffer = new Uint8Array(0) + msg.on('body', (stream) => { + stream.on('data', (chunk: Buffer) => { + // merge into one common buffer + const len = buffer.length + chunk.length + const merged = new Uint8Array(len) + merged.set(buffer) + merged.set(chunk, buffer.length) + buffer = merged + }).once('end', () => { + res({filename, data: Buffer.from( + Buffer + .from(buffer) + .toString('ascii'), + 'base64' + )}) + }) + }).once('end', () => { + console.log('ended') + }).once('error', (err) => { + console.log(err) + }) + }).once('error', (err) => { + console.log(err) + }) + }) + } + + // this is defined but eslint do not seems to reconize it + // eslint-disable-next-line no-undef + private fetchPart(partID: string, encoding: BufferEncoding = 'utf8'): Promise { + const req = this.imap.fetch(this.id, { + bodies: partID, + struct: true + }) + return new Promise((res) => { + req.on('message', (msg) => { + msg.on('body', (strm) => { + let bfr = '' + strm.on('data', (chunk: Buffer) => bfr += chunk.toString(encoding)) + .once('end', () => res(bfr)) + }) + }) + }) + } + + private async getTextPart(attrs?: ImapMessageAttributes | Array): Promise<{partID: string} | null> { + if (!attrs) { + attrs = await this.getAttributes() + } + // @ts-expect-error IMAP does not return the correct type + for (const item of attrs) { + if (Array.isArray(item)) { + return this.getTextPart(item) + } else if (item.type === 'text' && item.subtype === 'plain') { + return item + } + } + return null + } + + private async getAttachmentParts(attrs?: ImapMessageAttributes | Array): Promise> { + if (!attrs) { + attrs = await this.getAttributes() + } + const attachments = [] + for (const item of attrs) { + if (Array.isArray(item)) { + attachments.push(...await this.getAttachmentParts(item)) + } else if (item.disposition && ['inline', 'attachment'].indexOf(item.disposition.type.toLowerCase()) > -1) { + attachments.push(item) + } + + } + return attachments + } + + private decodeEmail(data: string): string { + // normalise end-of-line signals + data = data.replace(/(\r\n|\n|\r)/g, '\n') + + // replace equals sign at end-of-line with nothing + data = data.replace(/=\n/g, '') + + // encoded text might contain percent signs + // decode each section separately + const bits = data.split('%') + for (let idx = 0; idx < bits.length; idx++) { + let char = bits[idx] + if (!char) { + continue + } + + // replace equals sign with percent sign + char = char.replace(/=/g, '%') + + // decode the section + bits[idx] = decodeURIComponent(char) + } + + // join the sections back together + return bits.join('%') + } +} diff --git a/src/libs/Emails/EmailServer.ts b/src/libs/Emails/EmailServer.ts new file mode 100644 index 0000000..7626c46 --- /dev/null +++ b/src/libs/Emails/EmailServer.ts @@ -0,0 +1,234 @@ +import { objectFind } from '@dzeio/object-util' +import Imap from 'imap' +import { getEnv, requireEnv } from 'libs/Env' +import nodemailer from 'nodemailer' +import type Mail from 'nodemailer/lib/mailer' +import Email from './Email' 
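+// note (assumption): './HTML' is not created in this patch; judging from its use in sendEmail below,
+// it is expected to export a template helper shaped roughly like (content: string, conversationId?: string, link?: string) => string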
+import htmlEmail from './HTML' + +export default class EmailServer { + private static instances: Record = {} + + public debug: boolean + + private currentBox: Imap.Box | null = null + + private imap: Imap + + private smtp: nodemailer.Transporter + + private readonly config: { + imap: { + host: string + port: number + tls: boolean + username: string + } + smtp: { + host: string + port: number + secure: boolean + username: string + } + } + + private constructor( + private username: string, + password: string, + initConfig?: { + auth?: { + username: string + password: string + } + imap?: { + host: string + port?: number + tls?: boolean + username?: string + password?: string + } + smtp?: { + host: string + port?: number + secure?: boolean + username?: string + password?: string + }, + debug?: boolean + } + ) { + this.debug = initConfig?.debug ?? getEnv('EMAIL_DEBUG', 'false') === 'true' + + this.config = { + imap: { + host: initConfig?.imap?.host ?? requireEnv('IMAP_HOST'), + port: initConfig?.imap?.port ?? Number.parseInt(getEnv('IMAP_PORT', '993'), 10), + tls: initConfig?.imap?.tls ?? !!getEnv('IMAP_SECURE'), + username: initConfig?.imap?.username ?? initConfig?.auth?.username ?? username + }, + smtp: { + host: initConfig?.smtp?.host ?? requireEnv('SMTP_HOST'), + port: initConfig?.smtp?.port ?? Number.parseInt(getEnv('SMTP_PORT', '465'), 10), + secure: initConfig?.smtp?.secure ?? !!getEnv('SMTP_SECURE'), + username: initConfig?.smtp?.username ?? initConfig?.auth?.username ?? username + } + } + + EmailServer.instances[username] = this + + this.imap = new Imap({ + user: this.config.imap.username, + password: initConfig?.imap?.password ?? initConfig?.auth?.password ?? password, + host: this.config.imap.host, + port: this.config.imap.port, + tls: this.config.imap.tls, + debug: (info: string) => { + if (this.debug) { + console.log('IMAP[DEBUG]:', info) + } + } + }) + + // biome-ignore lint/suspicious/noExplicitAny: library return `any` + const smtpLogger = (level: string) => (...params: Array) => { + if (this.debug) { + console.log(`SMTP[${level}]:`, ...params) + } + } + + this.smtp = nodemailer.createTransport({ + host: this.config.smtp.host, + port: this.config.smtp.port, + secure: this.config.smtp.secure, + auth: { + user: this.config.smtp.username, + pass: initConfig?.smtp?.password ?? initConfig?.auth?.password ?? password + }, + logger: { + level: (level: string) => { + if (this.debug) { + console.log('SMTP[]:', level) + } + }, + trace: smtpLogger('TRACE'), + debug: smtpLogger('DEBUG'), + info: smtpLogger('INFO'), + warn: smtpLogger('WARN'), + error: smtpLogger('ERROR'), + fatal: smtpLogger('FATAL'), + } + }) + } + + public static async getInstance(type: 'credo' | 'gti') { + const items = { + credo: { + username: requireEnv('CREDO_MAIL_USER'), + password: requireEnv('CREDO_MAIL_PASS'), + }, + gti: { + username: requireEnv('GTI_MAIL_USER'), + password: requireEnv('GTI_MAIL_PASS'), + } + } as const + const correct = items[type] + const tmp = objectFind(EmailServer.instances, (_, key) => key === type) + let instance = tmp?.value ?? 
undefined + + if (!instance) { + instance = new EmailServer(correct.username, correct.password) + EmailServer.instances[type] = instance + await instance.connect() + } + + return instance + } + + public destroy() { + delete EmailServer.instances[this.username] + this.imap.end() + } + + public listEmails(): Promise> { + return new Promise((res, rej) => { + this.imap.search(['ALL'], (err, uids) => { + if (err) { + rej(err) + return + } + res(uids.map((uid) => this.getEmail(uid))) + }) + }) + } + + public getEmail(id: number) { + if (!this.currentBox) { + throw new Error('can\'t fetch a mail while out of a box') + } + return new Email(this.imap, this.currentBox, id) + } + + /** + * + * @param content the email text content + * @param recipient the email recipient (who to send it to) + * @param subject the email subject + * @param footer the email footer + */ + // eslint-disable-next-line complexity + public async sendEmail(content: string, recipient: string | Array, subject: string, footer?: {path?: string, id?: string}, options: Mail.Options = {}) { + if (typeof recipient !== 'string' && recipient.length === 0) { + if (this.debug) { + console.log('Email canceled, no recipient') + } + return + } + const domain = requireEnv('APP_URL') + const footerTxt = `\nIdentifiant de conversation: {{${footer?.id}}} + +${footer?.path ? `Votre lien vers le site internet: https://${domain}${footer?.path}` : '' }` + + const req: Mail.Options = Object.assign({ + bcc: recipient, + from: getEnv('SMTP_FROM') ?? this.username, + subject, + text: content + (footer ? footerTxt : ''), + html: footer ? htmlEmail(content, footer?.id, footer?.path ? `${domain}${footer?.path}` : undefined) : undefined + }, options) + + if (this.debug) { + console.log('------------------- SEND EMAIL DEBUG -------------------') + console.log(req) + console.log('------------------- SEND EMAIL DEBUG -------------------') + } else { + const res = await this.smtp.sendMail(req) + if (this.debug) { + console.log('------------------- SENT EMAIL DEBUG -------------------') + console.log(res) + console.log('------------------- SENT EMAIL DEBUG -------------------') + } + } + } + + private connect() { + console.log('Starting connection to IMAP') + return new Promise((res, rej) => { + this.imap.once('ready', () => { + console.log('connection to IMAP ready, opening box') + this.imap.openBox(requireEnv('IMAP_INBOX', 'INBOX'), true, (err, box) => { + if (err) { + rej(err) + return + } + this.currentBox = box + console.log('inbox open, ready for queries!') + res() + }) + }).once('error', (err: Error) => { + console.log('An error occured while connecting to the IMAP server', this.config.imap) + rej(err) + }) + this.imap.connect() + }) + } +} diff --git a/src/libs/Emails/SMTP.ts b/src/libs/Emails/SMTP.ts new file mode 100644 index 0000000..dfaa47c --- /dev/null +++ b/src/libs/Emails/SMTP.ts @@ -0,0 +1,74 @@ +import nodemailer from 'nodemailer' +import type NodeMailer from 'nodemailer/lib/mailer' + +/** + * Environment variables used + * EMAIL_USERNAME + * EMAIL_PASSWORD + * EMAIL_HOST + * EMAIL_FROM + */ +// eslint-disable-next-line @typescript-eslint/ban-types +export default class Mailer> = {}> { + public constructor( + private fetcher: (template: keyof Templates, ext: 'html' | 'txt') => Promise<{ + content: string + title: string + } | null>, private settings: { + username: string + password: string + host: string + from: string + secure?: boolean + tls?: boolean + }) {} + + /** + * Send the Email + * @param template the Template to use + * @param to 
the destination Email + * @param vars variables of the email, don't give subject as it is added inside the function + * @param options Email options + */ + public async send(template: T, to: string | Array, vars: Templates[T], options?: Omit) { + const mailer = nodemailer.createTransport({ + host: this.settings.host, + auth: { + user: this.settings.username, + pass: this.settings.password + }, + logger: true, + secure: true + }) + const { title } = await this.fetcher(template, 'txt') ?? { title: '' } + await mailer.sendMail(Object.assign(options ?? {}, { + to, + from: this.settings.from, + subject: title, + html: await this.html(template, { ...vars, subject: title }), + text: await this.text(template, { ...vars, subject: title }) + })) + } + + public html(template: T, vars: Templates[T]) { + return this.generateTemplate(template, vars, 'html') + } + + public text(template: T, vars: Templates[T]) { + return this.generateTemplate(template, vars, 'txt') + } + + private async generateTemplate(template: T, _vars: Templates[T], ext: 'html' | 'txt') { + try { + const txt = await this.fetcher(template, ext) + if (!txt) { + console.warn(`Error, Template not found (${template as string} - ${ext})`) + return undefined + } + return txt.content + } catch { + console.warn(`Error, Template not found (${template as string} - ${ext})`) + return undefined + } + } +} diff --git a/src/libs/FilesFormats/CSV.ts b/src/libs/FilesFormats/CSV.ts new file mode 100644 index 0000000..21291d6 --- /dev/null +++ b/src/libs/FilesFormats/CSV.ts @@ -0,0 +1,114 @@ +import { objectMap, objectSort, objectValues } from '@dzeio/object-util' +import assert from "node:assert/strict" + +export interface CSVOptions { + lineSeparator?: string + columnSeparator?: string + /** + * if set, it will skip trying to parse a number + */ + skipParsing?: boolean +} + +// biome-ignore lint/complexity/noStaticOnlyClass: +// biome-ignore lint/style/useNamingConvention: +export default class CSV { + + // eslint-disable-next-line complexity + public static parse(data: string, options?: CSVOptions): Array> { + assert(typeof data === 'string', `Data is not a string (${typeof data})`) + const lineSeparator = options?.lineSeparator ?? '\n' + const colSeparator = options?.columnSeparator ?? ',' + + let headers: Array | null = null + + const res: Array> = [] + + let values: Array = [] + + let previousSplit = 0 + let quoteCount = 0 + for (let idx = 0; idx < data.length; idx++) { + const char = data[idx] + if (char === '"') { + quoteCount++ + } + if ((char === colSeparator || char === lineSeparator) && quoteCount % 2 === 0) { + let text = data.slice(previousSplit, idx) + if (text.startsWith('"') && text.endsWith('"')) { + text = text.slice(1, text.length - 1) + } + values.push(text) + previousSplit = idx + 1 + } + if (char === lineSeparator && quoteCount % 2 === 0) { + if (!headers) { + headers = values + values = [] + continue + } + const lineFinal: Record = {} + let filled = false // filled make sure to skip empty lines + // eslint-disable-next-line max-depth + for (let idx2 = 0; idx2 < values.length; idx2++) { + let value: string | number = values[idx2]! + if (value.length === 0) { + continue + } + // eslint-disable-next-line max-depth + if (!options?.skipParsing && /^-?(\d|\.|E)+$/g.test(value as string)) { + value = Number.parseFloat(value as string) + } + const key = headers[idx2]! 
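+ // write the parsed cell under its column header; `filled` marks the row as non-empty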
+ lineFinal[key] = value + filled = true + } + if (filled) { + res.push(lineFinal) + } + values = [] + } + } + return res + } + + public static stringify(headers: Array, data: Array>, options?: CSVOptions) { + const ls = options?.lineSeparator ?? '\n' + + // encode headers + let body = CSV.encodeLine(headers, options) + ls + + // encode data body + for (const entry of data) { + body += CSV.encodeLine(objectValues(objectSort(entry, headers)), options) + ls + } + return body + } + + private static encodeLine(line: Array, options?: CSVOptions): string { + const ls = options?.lineSeparator ?? '\n' + const cs = options?.columnSeparator ?? ',' + + return objectMap(line, (it) => { + if (typeof it !== 'string') { + return it + } + if (it.includes('"') || it.includes(ls)|| it.includes(cs)) { + return `"${it}"` + } + return it + //column separator + }).join(cs) + } + + public static getHeaders(data: string, options?: { + lineSeparator?: string + columnSeparator?: string + }) { + const ls = options?.lineSeparator ?? '\n' + const cs = options?.columnSeparator ?? ',' + + //line separator et column separator + return data.split(ls)[0]?.split(cs)! + } +} diff --git a/src/libs/FilesFormats/XLSX.ts b/src/libs/FilesFormats/XLSX.ts new file mode 100644 index 0000000..4f2d786 --- /dev/null +++ b/src/libs/FilesFormats/XLSX.ts @@ -0,0 +1,93 @@ +import { objectRemap } from '@dzeio/object-util' +import AdmZip from 'adm-zip' +import CSV from './CSV' +import XML, { type Tag } from './XML' + +export default class XLSX { + /** + * + * @param xlsx the xlsx data as a buffer + * @returns a Record with each sheets and the raw CSV linked to it + */ + public static async parse(xlsx: ArrayBuffer): Promise> { + const zip = new AdmZip(Buffer.from(xlsx)) + const shared = await XML.parse(zip.readAsText('xl/sharedStrings.xml')) + const workbook = zip.readAsText('xl/workbook.xml') + const relations = zip.readAsText('xl/_rels/workbook.xml.rels') + const sheetsRelations = await XML.parse(relations) + const sheetsList = XML.findChild(await XML.parse(workbook), 'sheets')!.childs?.map((it) => ({ + name: XML.getAttribute(it as Tag, 'name'), + id: XML.getAttribute(it as Tag, 'r:id'), + path: '', + data: '' + }))! + for (const sheetItem of sheetsList) { + const rels = (sheetsRelations.childs as Array) + const rel = rels.find((it) => XML.getAttribute(it, 'Id') === sheetItem.id) + sheetItem.path = XML.getAttribute(rel!, 'Target')! + } + + await Promise.all(sheetsList.map(async (it) => { + it.data = await this.parseWorkSheet(shared, zip.readAsText(`xl/${it.path}`)) + return it + })) + + return objectRemap(sheetsList, (v) => ({ + key: v.name, + value: v.data + })) + } + public static async parseWorkSheet(refs: Tag, data: string): Promise { + const json = await XML.parse(data) + const sheetData = XML.findChild(json, 'sheetData')! + let headers: Array = [] + const res: Array> = [] + let headerDone = false + for (const row of sheetData.childs ?? []) { + const line: Array = [] + const id = XML.getAttribute((row as Tag), 'r') + for (const col of (row as Tag).childs ?? 
[]) { + if (!(col as Tag).childs) { + continue + } + const type = XML.getAttribute(col as Tag, 't') + const colIdx = XML.getAttribute(col as Tag, 'r') + const idx = colIdx!.charCodeAt(0) - 65 // TODO: handle more than 26 cols + const value = XML.findChild(col as Tag, 'v')?.childs?.[0] + if (!value || typeof value !== 'string') { + continue + } + // const value = ((col as Tag).childs![0] as Tag).childs![0] as string + if (type === 's') { + line[idx] = this.getRef(refs, value) + } else { + line[idx] = value + } + } + + if (!headerDone) { + headers = line + } else { + res[parseInt(id!, 10) - 1] = objectRemap(line, (v, idx: number) => { + return { + key: headers[idx] as string, + value: v + } + }) + } + + + headerDone = true + } + + return CSV.stringify(headers, res) + } + + private static getRef(refs: Tag, id: string | number): string { + if (typeof id === 'string') { + id = parseInt(id, 10) + } + + return ((refs.childs![id] as Tag)!.childs![0] as Tag)!.childs![0] as string + } +} diff --git a/src/libs/FilesFormats/XML.ts b/src/libs/FilesFormats/XML.ts new file mode 100644 index 0000000..e619907 --- /dev/null +++ b/src/libs/FilesFormats/XML.ts @@ -0,0 +1,262 @@ +import { objectMap } from '@dzeio/object-util' +import xml2js from 'xml2js' + +export interface Attribute { + key: string + value: string +} + +export interface Tag { + + /** + * the name of the tag + */ + name: string + + /** + * The tag attributes + */ + attrs?: Array + + /** + * The tag childs + */ + childs?: Array +} + +/** + * xml2js tag interface not published for our options + */ +interface xml2jsTag { + /** + * the name of the tag (it is a private value because of our integration, but we still want to get it) + */ + // @ts-ignore see above + // #name: string + /** + * the attributes record + */ + $?: Record + + /** + * the possible text (only when #name === __text__) + */ + _?: string + + /** + * the tag Childs + */ + $$?: Array +} + +export interface XMLOptions { + + /** + * if set (min: `true`) + * it will render the content in a user readable form + * (should not break compatibility (ODT excluded)) + */ + pretty?: boolean | { + + /** + * the whitespace character(s) + * default: '\t' + */ + whitespaceCharacter?: string + + /** + * the base number of whitespace character to use + * default: 0 + */ + baseCount?: number + } +} + +/** + * XML, the XML parser/stringifier that keep everything in order ! 
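+ *
+ * A minimal usage sketch (illustrative values only):
+ * ```
+ * const tag = await XML.parse('<a href="/home">hi</a>')
+ * XML.getAttribute(tag, 'href') // '/home'
+ * await XML.stringify(tag) // '<a href="/home">hi</a>'
+ * ```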
+ */ +export default class XML { + + /** + * Parse XML content to xml Tags + * @param str the XML string + */ + public static async parse(str: string): Promise { + const xml: xml2jsTag = await xml2js.parseStringPromise(str, { + charsAsChildren: true, + explicitChildren: true, + explicitRoot: false, + + preserveChildrenOrder: true + }) + return this.convert(xml) + } + + public static getAttribute(tag: Tag, key: string): string | undefined { + if (!tag.attrs || tag.attrs.length === 0) { + return undefined + } + return (tag.attrs.find((it) => it?.key === key))?.value + } + + /** + * Transform the Parsed XML back to XML + * @param tag the root tag + * @param options the options used + */ + public static async stringify(tag: Tag, options?: XMLOptions): Promise { + return this.stringifySync(tag, options) + } + + /** + * Transform the Parsed XML back to XML + * @param tag the root tag + * @param options the options used + */ + public static stringifySync(tag: Tag, options?: XMLOptions): string { + const pretty = !!options?.pretty + const baseCount = (typeof options?.pretty === 'object' && options?.pretty?.baseCount) || 0 + const whitespaceCharacter = (typeof options?.pretty === 'object' && options?.pretty?.whitespaceCharacter) || '\t' + const hasChilds = Array.isArray(tag.childs) && tag.childs.length > 0 + let base = options?.pretty ? ''.padEnd(baseCount, whitespaceCharacter) : '' + + if (!tag.name) { + throw new Error('Tag name MUST be set') + } else if (tag.name.includes(' ')) { + throw new Error(`The tag name MUST not include spaces (${tag.name})`) + } + + // start of tag + base += `<${tag.name}` + + // add every attributes + if (tag.attrs) { + for (const attr of tag.attrs) { + if (typeof attr === 'undefined') { + continue + } + base += ` ${attr.key}` + if (typeof attr.value === 'string') { + base += `="${this.escape(attr.value)}"` + } + } + } + + // end of tag + base += hasChilds ? '>' : '/>' + + if (options?.pretty) { + base += '\n' + } + + // process childs + if (hasChilds) { + for (const subTag of tag.childs!) { + if (typeof subTag === 'undefined') { + continue + } + if (typeof subTag === 'string') { + if (pretty) { + base += ''.padEnd(baseCount + 1, whitespaceCharacter) + } + base += this.escape(subTag) + if (pretty) { + base += '\n' + } + } else { + base += this.stringifySync(subTag, pretty ? 
{ pretty: { baseCount: baseCount + 1, whitespaceCharacter } } : undefined) + } + } + } + + // end of tag + if (hasChilds) { + if (pretty) { + base += ''.padEnd(baseCount, whitespaceCharacter) + } + base += `` + if (pretty) { + base += '\n' + } + } + return base + } + + /** + * + * @param it the element to validate + * @returns {boolean} if {it} is of type {@link Tag} + */ + public static isTag(it: any): it is Tag { + if (typeof it === 'object') { + return 'name' in it + } + return false + } + + public static findChild(tag: Tag, name: string): Tag | null { + if (tag.name === name) { + return tag + } else if (tag.childs) { + for (const child of tag.childs) { + if (typeof child !== 'object') { + continue + } + const found = this.findChild(child, name) + if (found) { + return found + } + } + } + return null + } + + /** + * Convert a xml2js tag to a XML.Tag + * @param {xml2jsTag} tag the xml2js tag + * @returns {Tag} the XML Tag + */ + private static convert(tag: xml2jsTag): Tag { + const baseTag: Tag = { + name: tag['#name'] + } + + // convert XML2JS attributes to our attribut format + // (Allowing users to keep order and to add items not only at the bottom) + if (tag.$) { + baseTag.attrs = objectMap(tag.$, (v, k) => ({ key: k, value: v })) + } + + // convert childs + if (tag.$$) { + baseTag.childs = tag.$$ + .map((subTag) => { + // if child is a string + if (subTag['#name'] === '__text__') { + return subTag._ + } + + // convert child + return this.convert(subTag) + }) + // filter empty items + .filter((v) => !!v) + } + return baseTag + } + + /** + * Escape every XML characters + * @param str the base string + * @returns {string} the escaped string + */ + private static escape(str: string): string { + return str + .replace(/&/gu, '&') + .replace(/"/gu, '"') + .replace(/'/gu, ''') + .replace(//gu, '>') + } + +} diff --git a/src/libs/Form.ts b/src/libs/Form.ts new file mode 100644 index 0000000..bad8318 --- /dev/null +++ b/src/libs/Form.ts @@ -0,0 +1,159 @@ +import { objectFind, objectLoop } from '@dzeio/object-util' +import type { Model, ModelInfer, ValidationError, ValidationResult } from './Schema' +import Schema from './Schema' + +/** + * Handle most of the form validation and error reporting + * + * create a new one by using {@link Form.create} + * + * note: This library is made to work with {@link Model} + */ +export default class Form { + + private data?: ValidationResult> + private formData?: FormData + private schema: Schema + + private globalError?: string + private errors: Partial> = {} + private defaultValues: Partial> = {} + + /** + * Create a ready to use form + * @param model the model the form should respect + * @param request the request element + * @returns the Form object + */ + public static async create(model: M, request: Request) { + const fm = new Form(model, request) + await fm.init() + return fm + } + + private constructor(public readonly model: M, private readonly request: Request) { + this.schema = new Schema(model) + } + + /** + * you should not use this function by itself, it is called bu {@link Form.create} + */ + public async init() { + try { + if (this.request.method === 'POST') { + if (!(this.request.headers.get('Content-Type') ?? 
'').startsWith('multipart/form-data')) { + console.warn('form\'s content-type is not multipart/form-data') + } + this.formData = await this.request.formData() + this.data = this.schema.validateFormData(this.formData) as any + if (this.data?.error) { + for (const error of this.data.error) { + if (error.field) { + const field = error.field + if (field.includes('.')) { + this.errors[field.slice(0, field.indexOf('.')) as keyof M] = error.message + } else { + this.errors[error.field as keyof M] = error.message + } + } else { + this.globalError = error.message + } + } + } + } + } catch {} + } + + public defaultValue(name: keyof M, value: any) { + this.defaultValues[name] = value + return this + } + + public defaultObject(obj: Record) { + objectLoop(obj, (value, key) => { + this.defaultValue(key, value) + }) + return this + } + + /** + * indicate if the form is valid or not + * @returns if the form submitted is valid or not + */ + public isValid(): boolean { + if (this.request.method !== 'POST' || !this.data) { + return false + } + if (this.data.error) { + return false + } + return true + } + + /** + * + * @param message the error message + * @param key (optionnal) the specific key to apply the error to + */ + public setError(message: string, key?: keyof M) { + if (key) { + this.errors[key] = message + } else { + this.globalError = message + } + } + + public getError(key?: keyof M): string | undefined { + if (!key) { + return this.globalError + } + return this.errors[key] + } + + public hasError(key?: keyof M): boolean { + return !!this.getError(key) + } + + public getAnyError(): string | undefined { + if (this.globalError) { + return this.globalError + } + const other = objectFind(this.errors, (value) => !!value) + if (other) { + return `${other.key.toString()}: ${other.value}` + } + return undefined + } + + public hasAnyError(): boolean { + return !!this.getAnyError() + } + + public attrs(key: keyof M) { + return this.attributes(key) + } + + public attributes(key: keyof M): Record { + const schema = this.model[key] + if (!schema) { + return {} + } + const attrs: Record = { + name: key + } + if (!schema.attributes.includes('form:password')) { + const value: any = this.formData?.get(key as string) as string ?? this.defaultValues[key] + if (value instanceof Date) { + attrs.value = `${value.getFullYear().toString().padStart(4, '0')}-${(value.getMonth() + 1).toString().padStart(2, '0')}-${value.getDate().toString().padStart(2, '0')}` + } else { + attrs.value = value + } + } + + return attrs + } + + public getData(): ModelInfer { + return this.data!.object! 
+ } +} diff --git a/src/libs/S3.ts b/src/libs/S3.ts new file mode 100644 index 0000000..b2251b8 --- /dev/null +++ b/src/libs/S3.ts @@ -0,0 +1,97 @@ +import type { APIRoute } from 'astro' +import fs from 'node:fs/promises' +import ResponseBuilder from './ResponseBuilder' + + +/** + * Easily setup an S3 system right in your own API + * + * ex: (create a `[...path].ts` file and put this inside) + * ``` + * import S3 from 'libs/S3' + * + * // root path of the storage + * const s3 = new S3('./.data') + * + * export const GET = s3.GET + * export const PUT = s3.PUT + * export const DELETE = s3.DELETE + * ``` + */ +export default class S3 { + public constructor( + private readonly rootPath: string + ) {} + + public async getFile(path: string): Promise { + return fs.readFile(this.getFullPath(path)) + } + + public async putFile(path: string, data: Buffer, overwrite = false): Promise { + const fullPath = this.getFullPath(path) + if (!overwrite && await this.exists(fullPath)) { + return false + } + + const folder = fullPath.slice(0, fullPath.lastIndexOf('/')) + await fs.mkdir(folder, { recursive: true }) + + await fs.writeFile(this.getFullPath(path), data) + + return true + } + + public async deleteFile(path: string): Promise { + await fs.rm(this.getFullPath(path)) + return true + } + + public GET: APIRoute = async (ctx) => { + const path = ctx.params.path! + const file = await this.getFile(path) + + return new ResponseBuilder() + .body(file) + .addHeader('Content-Disposition', `attachment; filename="${path}"`) + .status(200) + .build() + } + + public PUT: APIRoute = async (ctx) => { + const path = ctx.params.path! + const data = await ctx.request.arrayBuffer() + const bfr = Buffer.from(data) + const ok = await this.putFile(path, bfr) + + return new ResponseBuilder() + .body({ + path: path, + size: bfr.byteLength + }) + .status(ok ? 201 : 400) + .build() + } + + public DELETE: APIRoute = async (ctx) => { + const path = ctx.params.path! + + await this.deleteFile(path) + + return new ResponseBuilder() + .status(200) + .build() + } + + private async exists(path: string): Promise { + try { + await fs.stat(path) + return true + } catch { + return false + } + } + + private getFullPath(path: string): string { + return this.rootPath + '/' + path + } +} diff --git a/src/libs/Schema/Items/DzeioLiteral.ts b/src/libs/Schema/Items/DzeioLiteral.ts new file mode 100644 index 0000000..59be327 --- /dev/null +++ b/src/libs/Schema/Items/DzeioLiteral.ts @@ -0,0 +1,23 @@ +import SchemaItem, { type JSONSchemaItem } from '../SchemaItem' + +export default class DzeioLiteral extends SchemaItem { + public constructor(private readonly value: T) { + super() + this.validations.push({ + fn(input) { + return input === value + } + }) + } + + public override isOfType(input: unknown): input is T { + return typeof input === typeof this.value + } + + public override toJSON(): JSONSchemaItem { + return { + type: 'literal', + params: [this.value as string] + } + } +} diff --git a/src/libs/Schema/Items/SchemaArray.ts b/src/libs/Schema/Items/SchemaArray.ts new file mode 100644 index 0000000..a8eebdd --- /dev/null +++ b/src/libs/Schema/Items/SchemaArray.ts @@ -0,0 +1,93 @@ +import type { ValidationError, ValidationResult } from '..' 
+import SchemaItem from '../SchemaItem' + +export default class SchemaArray extends SchemaItem> { + + public constructor( + private readonly values: SchemaItem + ) { + super() + } + + public override parse(input: unknown): A[] | unknown { + // let master handle the first pass is desired + input = super.parse(input) + + if (!Array.isArray(input)) { + return input + } + + const clone = [] + for (const item of input) { + clone.push(this.values.parse(item)) + } + + return clone + } + + public override validate(input: A[], fast = false): ValidationResult { + const tmp = super.validate(input, fast) + if (tmp.error) { + return tmp + } + const clone: Array = [] + const errs: Array = [] + for (let idx = 0; idx < tmp.object.length; idx++) { + const item = tmp.object[idx]; + const res = this.values.validate(item as A) + if (res.error) { + const errors = res.error.map((it) => ({ + message: it.message, + field: it.field ? `${idx}.${it.field}` : idx.toString() + })) + if (fast) { + return { + error: errors + } + } + errs.push(...errors) + } else { + clone.push(res.object as A) + } + } + + if (errs.length > 0) { + return { + error: errs + } + } + + return { + object: clone + } + } + + public override transform(input: A[]): A[] { + const clone = [] + for (const item of super.transform(input)) { + clone.push(this.values.transform(item)) + } + + return clone + } + + /** + * transform the array so it only contains one of each elements + */ + public unique(): this { + this.transforms.push((input) => input.filter((it, idx) => input.indexOf(it) === idx)) + + return this + } + + public override isOfType(input: unknown): input is Array { + return Array.isArray(input) + } + + // public override toJSON(): JSONSchemaItem { + // return { + // type: 'array', + // childs: this.values + // } + // } +} diff --git a/src/libs/Schema/Items/SchemaBoolean.ts b/src/libs/Schema/Items/SchemaBoolean.ts new file mode 100644 index 0000000..574df90 --- /dev/null +++ b/src/libs/Schema/Items/SchemaBoolean.ts @@ -0,0 +1,8 @@ +import SchemaItem from '../SchemaItem' + +export default class SchemaBoolean extends SchemaItem { + + public override isOfType(input: unknown): input is boolean { + return typeof input === 'boolean' + } +} diff --git a/src/libs/Schema/Items/SchemaDate.ts b/src/libs/Schema/Items/SchemaDate.ts new file mode 100644 index 0000000..7bcaebb --- /dev/null +++ b/src/libs/Schema/Items/SchemaDate.ts @@ -0,0 +1,48 @@ +import SchemaItem from '../SchemaItem' + +export default class SchemaDate extends SchemaItem { + + public parseString(): this { + this.parseActions.push((input) => typeof input === 'string' ? 
new Date(input) : input) + return this + } + + public min(value: Date, message?: string): this { + this.validations.push({ + fn(input) { + return input >= value + }, + message: message + }) + + return this + } + + public parseFromExcelString(): this { + this.parseActions.push((input) => { + if (typeof input !== 'string') { + return input + } + const days = parseFloat(input) + const millis = days * 24 * 60 * 60 * 1000 + const date = new Date('1900-01-01') + date.setTime(date.getTime() + millis) + return date + }) + return this + } + + public max(value: Date, message?: string): this { + this.validations.push({ + fn(input) { + return input <= value + }, + message: message + }) + return this + } + + public override isOfType(input: unknown): input is Date { + return input instanceof Date && !isNaN(input.getTime()) + } +} diff --git a/src/libs/Schema/Items/SchemaFile.ts b/src/libs/Schema/Items/SchemaFile.ts new file mode 100644 index 0000000..9d5e57b --- /dev/null +++ b/src/libs/Schema/Items/SchemaFile.ts @@ -0,0 +1,21 @@ +import SchemaItem from '../SchemaItem' + +export default class SchemaFile extends SchemaItem { + constructor () { + super() + this.parseActions.push((input) => this.isOfType(input) && input.size > 0 ? input : undefined) + } + + public extension(ext: string, message?: string): this { + this.validations.push({ + fn: (input) => input.name.endsWith(ext), + message + }) + + return this + } + + public override isOfType(input: unknown): input is File { + return input instanceof File + } +} diff --git a/src/libs/Schema/Items/SchemaNullable.ts b/src/libs/Schema/Items/SchemaNullable.ts new file mode 100644 index 0000000..9b51377 --- /dev/null +++ b/src/libs/Schema/Items/SchemaNullable.ts @@ -0,0 +1,65 @@ +import type { ValidationResult } from '..' 
+import SchemaItem from '../SchemaItem' +import { isNull } from '../utils' + +export default class SchemaNullable extends SchemaItem { + + public constructor(private readonly item: SchemaItem) { + super() + } + + public emptyAsNull(): this { + this.parseActions.push((input) => { + if (typeof input === 'string' && input === '') { + return null + } + return input + }) + + return this + } + + public falthyAsNull(): this { + this.parseActions.push((input) => { + if (!input) { + return null + } + return input + }) + + return this + } + + public override transform(input: A | null | undefined): A | null | undefined { + const transformed = super.transform(input) + + if (isNull(transformed) || isNull(input)) { + return transformed + } + + return this.item.transform(input) + } + + public override validate(input: A | null | undefined): ValidationResult { + if (isNull(input)) { + return { + object: input + } + } + return this.item.validate(input) + } + + public override parse(input: unknown): (A | null | undefined) | unknown { + const parsed = super.parse(input) + + if (isNull(parsed) || isNull(input)) { + return parsed + } + + return this.item.parse(input) + } + + public override isOfType(input: unknown): input is A | undefined | null { + return isNull(input) || this.item.isOfType(input) + } +} diff --git a/src/libs/Schema/Items/SchemaNumber.ts b/src/libs/Schema/Items/SchemaNumber.ts new file mode 100644 index 0000000..738e40f --- /dev/null +++ b/src/libs/Schema/Items/SchemaNumber.ts @@ -0,0 +1,89 @@ +import SchemaItem from '../SchemaItem' + +export default class SchemaNumber extends SchemaItem { + + public min(...params: Parameters): this { + return this.gte(...params) + } + + public max(...params: Parameters): this { + return this.lte(...params) + } + + /** + * validate that the number is less or equal than {@link value} + * @param value the maxumum value (inclusive) + * @param message the message sent if not valid + */ + public lte(value: number, message?: string): this { + this.validations.push({ + fn(input) { + return input <= value + }, + message: message + }) + + return this + } + + /** + * validate that the number is more or equal than {@link value} + * @param value the minimum value (inclusive) + * @param message the message sent if not valid + */ + public gte(value: number, message?: string): this { + this.validations.push({ + fn(input) { + return input >= value + }, + message: message + }) + return this + } + + /** + * validate that the number is less than {@link value} + * @param value the maxumum value (exclusive) + * @param message the message sent if not valid + */ + public lt(value: number, message?: string): this { + this.validations.push({ + fn(input) { + return input < value + }, + message: message + }) + + return this + } + + /** + * validate that the number is more than {@link value} + * @param value the minimum value (exclusive) + * @param message the message sent if not valid + */ + public gt(value: number, message?: string): this { + this.validations.push({ + fn(input) { + return input > value + }, + message: message + }) + return this + } + + /** + * Try to parse strings before validating + */ + public parseString(): this { + this.parseActions.push((input) => + typeof input === 'string' ? 
Number.parseFloat(input) : input + ) + + return this + } + + public override isOfType(input: unknown): input is number { + return typeof input === 'number' && !Number.isNaN(input) + } +} diff --git a/src/libs/Schema/Items/SchemaRecord.ts b/src/libs/Schema/Items/SchemaRecord.ts new file mode 100644 index 0000000..571c0e6 --- /dev/null +++ b/src/libs/Schema/Items/SchemaRecord.ts @@ -0,0 +1,88 @@ +import { isObject, objectLoop, objectRemap } from '@dzeio/object-util' +import type { ValidationError, ValidationResult } from '..' +import SchemaItem from '../SchemaItem' + +export default class SchemaRecord extends SchemaItem> { + + public constructor( + private readonly key: SchemaItem, + private readonly values: SchemaItem + ) { + super() + } + + public override parse(input: unknown): unknown { + input = super.parse(input) + + if (!this.isOfType(input)) { + return input + } + + const finalObj: Record = {} as Record + const error = objectLoop(input, (value, key) => { + const res1 = this.key.parse(key) + const res2 = this.values.parse(value) + if (typeof res1 !== 'string' && typeof res1 !== 'number') { + return false + } + // @ts-expect-error normal behavior + finalObj[res1] = res2 + return true + }) + if (error) { + return input + } + return finalObj + } + + public override transform(input: Record): Record { + return objectRemap(super.transform(input), (value, key) => { + return { + key: this.key.transform(key), + value: this.values.transform(value) + } + }) + } + + public override validate(input: Record, fast = false): ValidationResult> { + const tmp = super.validate(input) + if (tmp.error) { + return tmp + } + + const errs: Array = [] + const finalObj: Record = {} as Record + + objectLoop(tmp.object, (value, key) => { + const res1 = this.key.validate(key) + const res2 = this.values.validate(value) + const localErrs = (res1.error ?? []).concat(...(res2.error ?? [])) + if (localErrs.length > 0) { + errs.push(...localErrs.map((it) => ({ + message: it.message, + field: it.field ? 
`${key as string}.${it.field}` : key.toString() + }))) + return !fast + } else { + // @ts-expect-error the check in the if assure the typing below + finalObj[res1.object] = res2.object + } + + return true + }) + + if (errs.length > 0) { + return { + error: errs + } + } + + return { + object: finalObj + } + } + + public override isOfType(input: unknown): input is Record { + return isObject(input) && Object.prototype.toString.call(input) === '[object Object]' + } +} diff --git a/src/libs/Schema/Items/SchemaString.ts b/src/libs/Schema/Items/SchemaString.ts new file mode 100644 index 0000000..de09bf8 --- /dev/null +++ b/src/libs/Schema/Items/SchemaString.ts @@ -0,0 +1,76 @@ +import SchemaItem from '../SchemaItem' +import SchemaNullable from './SchemaNullable' + +export default class SchemaString extends SchemaItem { + /** + * force the input text to be a minimum of `value` size + * @param value the minimum length of the text + * @param message the message to display on an error + */ + public min(value: number, message?: string): SchemaString { + this.validations.push({ + fn(input) { + return input.length >= value + }, + message: message + }) + + return this + } + + /** + * force the input text to be a maximum of `value` size + * @param value the maximum length of the text + * @param message the message to display on an error + */ + public max(value: number, message?: string): SchemaString { + this.validations.push({ + fn(input) { + return input.length <= value + }, + message: message + }) + return this + } + + /** + * the value must not be empty (`''`) + * @param message + * @returns + */ + public notEmpty(message?: string): this { + this.validations.push({ + fn(input) { + return input !== '' + }, + message: message + }) + return this + } + + /** + * note: this nullable MUST be used last as it change the type of the returned function + */ + public nullable() { + return new SchemaNullable(this) + } + + /** + * force the input text to respect a Regexp + * @param regex the regex to validate against + * @param message the message to display on an error + */ + public regex(regex: RegExp, message?: string): SchemaString { + this.validations.push({ + fn(input) { + return regex.test(input) + }, + message + }) + return this + } + + public override isOfType(input: unknown): input is string { + return typeof input === 'string' + } +} diff --git a/src/libs/Schema/README.md b/src/libs/Schema/README.md new file mode 100644 index 0000000..8415edd --- /dev/null +++ b/src/libs/Schema/README.md @@ -0,0 +1,25 @@ +a Full featured and lightweight Schema validation/parsing library + +it is meant to be used for input validation + +example : + +```ts +import Schema, { s, type SchemaInfer } from 'libs/Schema' + +const schema = new Schema({ + test: s.record(s.string(), s.object({ + a: s.number().parseString().min(3, 'a is too small') + })) +}) + +const t = { + test: { + b: {a: '34'} + } +} + +// validate that `t` is coherant with the schema above +const { object, error } = schema.validate(t) +console.log(object, error) +``` diff --git a/src/libs/Schema/SchemaItem.ts b/src/libs/Schema/SchemaItem.ts new file mode 100644 index 0000000..1aee20e --- /dev/null +++ b/src/libs/Schema/SchemaItem.ts @@ -0,0 +1,169 @@ +import type { ValidationResult } from '.' +import Schema from '.' 
+import { isNull } from './utils' + +export interface Messages { + globalInvalid: string +} + +/** + * An element of a schema + */ +export default abstract class SchemaItem { + /** + * get additionnal attributes used to make the Schema work with outside libs + */ + public attributes: Array = [] + + /** + * the list of validations + */ + protected validations: Array<{ + fn: (input: T) => boolean + message?: string | undefined + }> = [] + + protected parseActions: Array<(input: unknown) => T | unknown> = [] + protected transforms: Array<(input: T) => T> = [] + + /** + * set the list of attributes for the item of the schema + * @param attributes the attributes + */ + public attr(...attributes: Array) { + this.attributes = attributes + return this + } + + /** + * set the default value of the schema element + * @param value the default value + * @param strict if strict, it will use it for null/undefined, else it will check for falthy values + */ + public defaultValue(value: T, strict = true) { + this.parseActions.push((input) => { + if (strict && isNull(input)) { + return value + } + if (!value) { + return input + } + return input + }) + + return this + } + + /** + * + * @param values the possible values the field can be + * @param message the message returned if it does not respect the value + */ + public in(values: Array, message?: string) { + this.validations.push({ + fn: (input) => values.includes(input), + message + }) + + return this + } + + /** + * Try to parse the input from another format + * + * @param input the input to transform, it is done before validation, so the value can be anything + * @returns the transformed value + */ + public parse(input: unknown): T | unknown { + for (const transform of this.parseActions) { + const tmp = transform(input) + if (this.isOfType(tmp)) { + return tmp + } + } + + return input + } + + /** + * transform a valid value + * + * @param input the input to transform, it MUST be validated beforehand + * @returns the transformed value + */ + public transform(input: T): T { + let res = input + + for (const action of this.transforms) { + res = action(res) + } + + return res + } + + /** + * validate that the input is valid or not + * @param input the input to validate + * @param fast if true the validation stops at the first error + * @returns a string if it's not valid, else null + */ + public validate(input: T, fast = false): ValidationResult { + for (const validation of this.validations) { + if (!validation.fn(input as T)) { + return { + error: [{ + message: validation.message ?? Schema.messages.globalInvalid + }] + } + } + } + return { + object: input as T + } + } + + /** + * validate that the input value is of the type of the schema item + * + * it makes others functions easier to works with + * @param input the input to validate + */ + public abstract isOfType(input: unknown): input is T + + // public abstract toJSON(): JSONSchemaItem +} + +type Parseable = string | number | boolean + +export interface ValidatorJSON { + /** + * the function name (ex: `min`, `max`) + */ + name: string + /** + * the function parameters + */ + params?: Array +} + +export interface JSONSchemaItem { + /** + * Schema item + * + * ex: `string`, `number`, `boolean`, ... 
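+	 *
+	 * a complete serialized item might look like (illustrative only):
+	 * `{ type: 'string', attributes: ['form:password'], actions: [{ name: 'min', params: [3] }] }`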
+ */ + type: string + /** + * constructor params + */ + params?: Array + /** + * list of attributes + */ + attributes?: Array + actions?: Array +} + +export type JSONSchema = { + [a: string]: JSONSchemaItem +} diff --git a/src/libs/Schema/index.ts b/src/libs/Schema/index.ts new file mode 100644 index 0000000..1d08bf0 --- /dev/null +++ b/src/libs/Schema/index.ts @@ -0,0 +1,217 @@ +import { isObject, objectLoop } from '@dzeio/object-util' +import DzeioLiteral from './Items/DzeioLiteral' +import SchemaArray from './Items/SchemaArray' +import SchemaBoolean from './Items/SchemaBoolean' +import SchemaDate from './Items/SchemaDate' +import SchemaFile from './Items/SchemaFile' +import SchemaNullable from './Items/SchemaNullable' +import SchemaNumber from './Items/SchemaNumber' +import SchemaRecord from './Items/SchemaRecord' +import SchemaString from './Items/SchemaString' +import SchemaItem from './SchemaItem' + +export interface ValidationError { + message: string + field?: string + value?: unknown +} + +export type ValidationResult = { + object: T + error?: undefined +} | { + object?: undefined + error: Array +} + +// biome-ignore lint/suspicious/noExplicitAny: +export type Model = Record> + +export type SchemaInfer = ModelInfer + +export type ModelInfer = { + [key in keyof M]: ReturnType +} + +/** + * A schema to validate input or external datas + */ +export default class Schema extends SchemaItem> { + + public static messages = { + typeInvalid: 'Type of field is not valid', + notAnObject: 'the data submitted is not valid', + globalInvalid: 'the field is invalid' + } + + public constructor(public readonly model: M) { + super() + } + + /** + * See {@link SchemaString} + */ + public static string( + ...inputs: ConstructorParameters + ) : SchemaString { + return new SchemaString(...inputs) + } + + public static file( + ...inputs: ConstructorParameters + ): SchemaFile { + return new SchemaFile(...inputs) + } + + public static number( + ...inputs: ConstructorParameters + ): SchemaNumber { + return new SchemaNumber(...inputs) + } + + public static date( + ...inputs: ConstructorParameters + ): SchemaDate { + return new SchemaDate(...inputs) + } + + public static literal( + ...inputs: ConstructorParameters> + ): DzeioLiteral { + return new DzeioLiteral(...inputs) + } + + public static object( + ...inputs: ConstructorParameters> + ): Schema { + return new Schema(...inputs) + } + + public static record( + ...inputs: ConstructorParameters> + ): SchemaRecord { + return new SchemaRecord(...inputs) + } + + public static array( + ...inputs: ConstructorParameters> + ): SchemaArray { + return new SchemaArray(...inputs) + } + + public static nullable( + ...inputs: ConstructorParameters> + ): SchemaNullable { + return new SchemaNullable(...inputs) + } + + public static boolean( + ...inputs: ConstructorParameters + ): SchemaBoolean { + return new SchemaBoolean(...inputs) + } + + /** + * @param query the URL params to validate + * @returns + */ + public validateQuery(query: URLSearchParams, fast = false): ReturnType['validate']> { + const record: Record = {} + for (const [key, value] of query) { + record[key] = value + } + + return this.validate(record, fast) + } + + /** + * @param form the form to validate + */ + public validateForm(form: HTMLFormElement, fast = false): ReturnType['validate']> { + const data = new FormData(form) + return this.validateFormData(data, fast) + } + + /** + * @param data the FormData to validate + * @returns + */ + public validateFormData(data: FormData, fast = false): 
ReturnType['validate']> { + const record: Record = {} + for (const [key, value] of data) { + const isArray = this.model[key]?.isOfType([]) ?? false + record[key] = isArray ? data.getAll(key) : value + } + + return this.validate(record, fast) + } + + /** + * + * @param input the data to validate + * @param options additionnal validation options + * @returns blablabla + */ + public override validate(input: unknown, fast = false): ValidationResult> { + if (!isObject(input)) { + return { + error: [{ + message: Schema.messages.notAnObject + }] + } + } + + const errors: Array = [] + // biome-ignore lint/suspicious/noExplicitAny: + const res: ModelInfer = {} as any + objectLoop(this.model, (v, k) => { + // parse value from other formats + const value = v.parse(input[k]) + + // validate that the value is of type + if (!v.isOfType(value)) { + errors.push({ + message: Schema.messages.typeInvalid, + field: k, + value: value + }) + return !fast + } + + // run validations + const invalid = v.validate(value) + if (invalid.error) { + errors.push(...invalid.error.map((it) => ({ + message: it.message, + field: it.field ? `${k}.${it.field}` : k + }))) + return !fast + } + + // transform and assign final value + // @ts-expect-error normal behavior + res[k] = v.transform(value) + + return true + }) + + if (errors.length > 0) { + return { + error: errors + } + } + + return { + object: res + } + } + + public override isOfType(input: unknown): input is ModelInfer { + return isObject(input) + } +} + +/** + * alias of {@link Schema} + */ +export const s = Schema diff --git a/src/libs/Schema/utils.ts b/src/libs/Schema/utils.ts new file mode 100644 index 0000000..86f6ec6 --- /dev/null +++ b/src/libs/Schema/utils.ts @@ -0,0 +1,3 @@ +export function isNull(value: unknown): value is undefined | null { + return typeof value === 'undefined' || value === null +} diff --git a/src/models/Adapters/AdapterUtils.ts b/src/models/Adapters/AdapterUtils.ts new file mode 100644 index 0000000..bcfd294 --- /dev/null +++ b/src/models/Adapters/AdapterUtils.ts @@ -0,0 +1,234 @@ +import { objectFind, objectLoop } from '@dzeio/object-util' +import { Sort, type Query, type QueryList, type QueryValues } from 'models/Query' + +export declare type AllowedValues = string | number | bigint | boolean | null | undefined + +// eslint-disable-next-line complexity +export function filter(query: Query, results: Array, options?: { debug?: boolean }): {filtered: Array, unpaginatedLength: number} { + if (options?.debug) { + console.log('Query', query) + } + // filter + let filtered = results.filter((it) => { + const res = objectLoop(query, (value, key) => { + if (key === '$or') { + for (const sub of value as any) { + const final = filterEntry(sub, it) + // eslint-disable-next-line max-depth + if (final) { + return true + } + } + return false + } + if ((key as string).startsWith('$')) { + return true + } + return filterEntry(query, it) + }) + // console.log(it, res) + return res + }) + if (options?.debug) { + console.log('postFilters', filtered) + } + + // sort + if (query.$sort) { + // temp until better solution is found + const first = objectFind(query.$sort, () => true) + filtered = filtered.sort((objA, objB) => { + const a = objA[first!.key] + const b = objB[first!.key] + const ascend = first?.value !== Sort.DESC + if (typeof a === 'number' && typeof b === 'number') { + if (ascend) { + return b - a + } else { + return a - b + } + } + if (a instanceof Date && b instanceof Date) { + if (ascend) { + return a.getTime() - b.getTime() + } else { + return 
b.getTime() - a.getTime() + } + } + if (typeof a === 'string' && typeof b === 'string') { + if (ascend) { + return a.localeCompare(b) + } else { + return b.localeCompare(a) + } + + } + if (ascend) { + return a > b ? 1 : -1 + } + return a > b ? -1 : 1 + }) + } + if (options?.debug) { + console.log('postSort', filtered) + } + + // length of the query assuming a single page + const unpaginatedLength = filtered.length + // limit + if (query.$offset || query.$limit) { + const offset = query.$offset ?? 0 + filtered = filtered.slice(offset, offset + (query.$limit ?? Infinity)) + } + if (options?.debug) { + console.log('postLimit', filtered) + } + + return { filtered, unpaginatedLength } +} + +/** + * + * @param query the query of the entry + * @param item the implementation of the item + * @returns if it should be kept or not + */ +export function filterEntry(query: QueryList, item: T): boolean { + // eslint-disable-next-line complexity + const res = objectLoop(query as any, (queryValue, key: keyof typeof query) => { + /** + * TODO: handle $keys + */ + if ((key as string).startsWith('$')) { + return true + } + + return filterValue(item[key], queryValue) + }) + + return res +} + +/** + * indicate if a value should be kept by an ENTIRE query + * + * @param value the value to filter + * @param query the full query + * @returns if the query should keep the value or not + */ +function filterValue(value: any, query: QueryValues) { + if (typeof query !== 'object' || query === null || query instanceof RegExp || Array.isArray(query)) { + return filterItem(value, query) + } + + // loop through each keys of the query + // eslint-disable-next-line arrow-body-style + return objectLoop(query, (querySubValue: any, queryKey: any) => { + return filterItem(value, {[queryKey]: querySubValue } as QueryValues) + }) +} + +/** + * + * @param value the value to check + * @param query a SINGLE query to check against + * @returns if the value should be kept or not + */ +// eslint-disable-next-line complexity +function filterItem(value: any, query: QueryValues): boolean { + /** + * check if the value is null + */ + if (query === null) { + return typeof value === 'undefined' || value === null + } + + if (query instanceof RegExp) { + return query.test(typeof value === 'string' ? value : value.toString()) + } + + /** + * ?!? + */ + if (value === null || typeof value === 'undefined') { + return false + } + + /** + * strict value check by default + */ + if (!(typeof query === 'object')) { + return query === value + } + + /** + * Array checking and $in + */ + if (Array.isArray(query) || '$in' in query) { + const arr = Array.isArray(query) ? query : query.$in as Array + return arr.includes(value) + } + + if ('$inc' in query) { + return (value.toString() as string).toLowerCase().includes(query.$inc!.toString()!.toLowerCase()) + } + + if ('$eq' in query) { + return query.$eq === value + } + + /** + * numbers specific cases for numbers + */ + if ('$gt' in query) { + value = value instanceof Date ? value.getTime() : value + const comparedValue = query.$gt instanceof Date ? query.$gt.getTime() : query.$gt + return typeof value === 'number' && typeof comparedValue === 'number' && value > comparedValue + } + + if ('$lt' in query) { + value = value instanceof Date ? value.getTime() : value + const comparedValue = query.$lt instanceof Date ? query.$lt.getTime() : query.$lt + return typeof value === 'number' && typeof comparedValue === 'number' && value < comparedValue + } + + if ('$gte' in query) { + value = value instanceof Date ? 
value.getTime() : value + const comparedValue = query.$gte instanceof Date ? query.$gte.getTime() : query.$gte + return typeof value === 'number' && typeof comparedValue === 'number' && value >= comparedValue + } + + if ('$lte' in query) { + value = value instanceof Date ? value.getTime() : value + const comparedValue = query.$lte instanceof Date ? query.$lte.getTime() : query.$lte + return typeof value === 'number' && typeof comparedValue === 'number' && value <= comparedValue + } + + if ('$len' in query && Array.isArray(value)) { + return value.length === query.$len + } + + /** + * Logical Operators + */ + if ('$or' in query && Array.isArray(query.$or)) { + return !!query.$or.find((it) => filterValue(value, it as QueryValues)) + } + if ('$and' in query && Array.isArray(query.$and)) { + return !query.$and.find((it) => !filterValue(value, it as QueryValues)) + } + + if ('$not' in query) { + return !filterValue(value, query.$not as QueryValues) + } + + if ('$nor' in query && Array.isArray(query.$nor)) { + return !query.$nor.find((it) => filterValue(value, it as QueryValues)) + } + + if ('$nand' in query && Array.isArray(query.$nand)) { + return !!query.$nand.find((it) => !filterValue(value, it as QueryValues)) + } + + return false +} diff --git a/src/models/Adapters/CSVAdapter.ts b/src/models/Adapters/CSVAdapter.ts new file mode 100644 index 0000000..911759b --- /dev/null +++ b/src/models/Adapters/CSVAdapter.ts @@ -0,0 +1,54 @@ +import CSV, { type CSVOptions } from 'libs/FileFormats/CSV' +import type Schema from 'libs/Schema' +import type { SchemaInfer } from 'libs/Schema' +import fs from 'node:fs' +import type { Query } from '../Query' +import { filter } from './AdapterUtils' +import type DaoAdapter from './DaoAdapter' +import type { DBPull } from './DaoAdapter' + + +export default class CSVAdapter implements DaoAdapter { + + private data: Array> + + public constructor( + public readonly schema: T, + public readonly serverPath: string, + private readonly csvOptions?: CSVOptions + ) { + const data = fs.readFileSync(serverPath, 'utf-8') + this.data = CSV.parse(data, csvOptions) as Array> + } + + public async create(_obj: Partial>): Promise | null> { + throw new Error('not implemented') + } + + // eslint-disable-next-line complexity + public async read(query?: Query> | undefined): Promise> { + + const res = filter(query ?? 
{}, this.data) + + return { + rows: res.filtered.length, + rowsTotal: res.unpaginatedLength, + page: 1, + pageTotal: 1, + data: res.filtered + } + } + + public async update(_obj: SchemaInfer): Promise | null> { + throw new Error('not implemented') + } + + public async patch(_id: string, _obj: Partial>): Promise | null> { + throw new Error('not implemented') + + } + + public async delete(obj: SchemaInfer): Promise { + throw new Error('not implemented') + } +} diff --git a/src/models/Adapters/CassandraAdapter.ts.old b/src/models/Adapters/CassandraAdapter.ts.old new file mode 100644 index 0000000..06757bd --- /dev/null +++ b/src/models/Adapters/CassandraAdapter.ts.old @@ -0,0 +1,433 @@ +/* eslint-disable @typescript-eslint/no-non-null-assertion */ +import { objectFind, objectKeys, objectLoop, objectMap, objectRemap, objectValues } from '@dzeio/object-util' +import type { ArrayOrObject } from 'cassandra-driver' +import crypto from 'node:crypto' +import { Sort, type Query } from '../Query' +import { filter } from './AdapterUtils' +import type DaoAdapter from './DaoAdapter' +import type { DBPull } from './DaoAdapter' + +/** + * @deprecated need to be moved to the new Schema system + * + * (won't be done because we mostly won't be using it again...) + */ +export default class CassandraAdapter implements DaoAdapter { + + private id!: Array + + public constructor( + /** + * the schema used by Cassandra + */ + public readonly schema: Schema, + /** + * the table name + */ + public readonly table: string, + /** + * the id(s) + */ + id?: keyof T | Array, + + /** + * other secondary keys necessary to update data + */ + private readonly partitionKeys?: Array, + /** + * additionnal options to make the adapter work + */ + public readonly options: { + /** + * log the requests made to cassandra + */ + debug?: boolean + } = {} + ) { + if (!id) { + objectLoop(schema.model, (value, key) => { + if (!isSchemaItem(value)) { + return true + } + if (!value.database?.unique) { + return true + } + id = key + return false + }) + } else { + this.id = typeof id === 'string' ? [id] : id as Array + } + } + + // TODO: make it clearer what it does + public async create(obj: Partial>): Promise | null> { + + objectLoop(this.schema.model, (item, key) => { + if (isSchemaItem(item) && (item.database?.created || item.database?.updated)) { + // @ts-expect-error things get validated anyway + obj[key] = new Date() + } else if (isSchemaItem(item) && item.database?.auto && !obj[key]) { + if (item.type === String) { + // @ts-expect-error things get validated anyway + obj[key] = crypto.randomBytes(16).toString('hex') + } else { + // @ts-expect-error things get validated anyway + obj[key] = crypto.randomBytes(16).readUint32BE() + } + } + }) + + const clone = this.schema.parse(obj) + if (!clone) { + throw new Error('Invalid data given to create the final object') + } + + const keys = objectKeys(clone) + const keysStr = keys.join(', ') + const values = keys.fill('?').join(', ') + const req = `INSERT INTO ${this.table} (${keysStr}) VALUES (${values});` + const client = (await Client.get())! 
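+ // illustrative example: with a schema of { id, name, created } and table 'users',
+ // `req` is `INSERT INTO users (id, name, created) VALUES (?, ?, ?);` and `params`
+ // (built below) carries the corresponding values in the same order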
+ + const params = objectMap(clone as any, (value, key) => this.valueToDB(key as any, value)) + + if (this.options?.debug) { + console.log(req, params) + } + + try { + await client.execute(req, params, { prepare: true }) + } catch (e) { + console.log(e, req, params) + return null + } + return this.schema.parse(clone) + } + + // eslint-disable-next-line complexity + public async read(query?: Query> | undefined): Promise> { + let req: Array = ['SELECT', '*', 'FROM', this.table] + const params: ArrayOrObject = [] + + // list of the differents items in the WHERE statement + const whereItems: Array = [] + // if ((query?.where?.length ?? 0) > 0 && (query?.where?.length !== 1 || query?.where?.[0]?.[1] !== 'includes')) { + // for (const it of query?.where ?? []) { + // // eslint-disable-next-line max-depth + // switch (it[1]) { + // case 'in': + // // eslint-disable-next-line no-case-declarations + // const arr = it[2] as Array + + // whereItems.push(`${String(it[0])} IN (${arr.map(() => '?').join(',')})`) + // params.push(...arr) + // break + + // case 'equal': + // whereItems.push(`${String(it[0])} = ?`) + // params.push(it[2]) + // break + + // case 'after': + // whereItems.push(`${String(it[0])} >= ?`) + // params.push(it[2]) + // break + + // case 'before': + // whereItems.push(`${String(it[0])} <= ?`) + // params.push(it[2]) + // break + // } + // } + // } + + if (whereItems.length > 0) { + req.push('WHERE') + for (let idx = 0; idx < whereItems.length; idx++) { + const item = whereItems[idx] as string + if (idx > 0) { + req.push('AND') + } + req.push(item) + } + } + + // ORDER BY (not working as we want :() + // const sort = query?.$sort + // if (sort && sort.length >= 1) { + // const suffix = sort[0]?.[1] === 'asc' ? 'ASC' : 'DESC' + // req = req.concat(['ORDER', 'BY', sort[0]?.[0] as string, suffix]) + // } + + // LIMIT (not working because of ORDER BY) + // const page: number = query?.page ?? 0 + // const pageLimit: number | null = query?.limit ?? null + // let limit: number | null = null + // if (pageLimit && pageLimit > 0) { + // limit = pageLimit * (page + 1) + // req = req.concat(['LIMIT', limit.toString()]) + // } + + // ALLOWW FILTERING + req = req.concat(['ALLOW', 'FILTERING']) + const client = (await Client.get())! + + if (this.options?.debug) { + console.log(req, params) + } + + let res: Array> + try { + res = await client.execute(req.join(' '), params) + } catch (error) { + console.error('error running request') + console.error(req, params) + throw error + } + if (!res) { + return { + rows: 0, + pageTotal: 0, + page: 1, + rowsTotal: 0, + data: [] + } + } + + let dataset = res + .map((obj) => objectRemap(this.schema.model, (_, key) => ({ + key, + value: this.dbToValue(key, obj[key]) + }))) + .map((obj) => { + objectLoop(this.schema.model, (item, key) => { + if (Array.isArray(item) && !obj[key]) { + obj[key] = [] + } + }) + + return obj + }) + .map((it) => this.schema.parse(it)) + .filter((it): it is Implementation => !!it) + + /** + * POST QUERY TREATMENT + */ + // if ((query?.where?.length ?? 0) > 0) { + // for (const it of query?.where ?? []) { + // // eslint-disable-next-line max-depth + // switch (it[1]) { + // case 'includes': + // dataset = dataset.filter((entry) => entry[it[0]]?.toString()?.includes(it[2])) + // break + // } + // } + // } + + // sort + // const sort = query?.$sort + // if (sort) { + // const sortKey = sort ? 
sort[0]![0] : objectFind(this.schema.model, (value) => { + // if (!isSchemaItem(value)) { + // return false + // } + // return !!value.database?.created + // }) + // const sortValue = sort ? sort[0]![1] : 'asc' + // if (sortKey && sortValue) { + // if (sortValue === 'asc') { + // dataset = dataset.sort((a, b) => b[sortKey as string]! > a[sortKey as string]! ? 1 : -1) + // } else { + // dataset = dataset.sort((a, b) => b[sortKey as string]! < a[sortKey as string]! ? 1 : -1) + // } + // } + // } + + // console.log(res.rows, req) + // post request processing + // if (limit) { + // dataset = dataset.slice(page * (query?.limit ?? 0), limit) + // } + + // length of the query assuming a single page + let unpaginatedLength = dataset.length + // temp modification of comportement to use the new and better query system + if ((!query || !query?.$sort) && objectFind(this.schema.model, (_, key) => key === 'created')) { + // temp fix for the sorting algorithm + if (!query) { + // @ts-expect-error normal currently + query = { $sort: { created: Sort.DESC }} + } else { + query.$sort = { created: Sort.DESC } + } + } + if (query) { + const { filtered, unpaginatedLength: ul } = filter(query, dataset, this.options) + dataset = filtered + unpaginatedLength = ul + } + + // console.log(res) + const pageLimit = query?.$limit ?? 10 + const pageOffset = query?.$offset ?? 0 + return { + rows: dataset.length, + rowsTotal: unpaginatedLength, + page: Math.floor(pageOffset / pageLimit), + pageTotal: Math.max(1, Math.ceil(unpaginatedLength / pageLimit)), + data: dataset + } + } + + public async update(obj: Implementation): Promise | null> { + return this.patch(obj) + } + + public async patch(id: Partial>): Promise | null> + public async patch(id: string, obj: Partial>): Promise | null> + // eslint-disable-next-line complexity + public async patch(id: string | Partial>, obj?: Partial>): Promise | null> { + + if (!obj) { + if (typeof id === 'string') { + return null + } + obj = {...id} as Partial> + } + + // update the updated time + objectLoop(this.schema.model, (item, key) => { + if (isSchemaItem(item) && item.database?.updated) { + // @ts-expect-error things get validated anyway + obj[key] = new Date() + } + }) + + // build the request parts + const parts: Array = ['UPDATE', this.table, 'SET'] + const params: Array = [] + + // remove ids + const ids = Array.isArray(this.id) ? this.id : [this.id] + for (const tmp of ids) { + // eslint-disable-next-line @typescript-eslint/no-dynamic-delete + delete obj[tmp] + } + + // map the items to update + const keys = objectMap(obj as {}, (_, key) => `${key}=?`) + parts.push(keys.join(', ')) + params.push(...objectValues(obj as {})) + + // filter by the ids + parts.push('WHERE') + const read: Partial = {} + for (let idx = 0; idx < ids.length; idx++) { + const key = ids[idx] as string + + if (idx > 0) { + parts.push('AND') + } + parts.push(`${key}=?`) + const value = obj[key] ?? (typeof id === 'string' ? 
id : id[key]) + read[key] = this.valueToDB(key, value) + if (!value) { + throw new Error(`Missing id (${key})`) + } + params.push(value) + } + + if (this.partitionKeys && this.partitionKeys?.length > 0) { + const { data } = await this.read(read) + const item = data[0] + for (const key of this.partitionKeys) { + parts.push('AND', `${key as string}=?`) + params.push(this.valueToDB(key, item![key])) + } + } + + const req = parts.join(' ') + const client = await Client.get() + + if (this.options?.debug) { + console.log(req, params) + } + + try { + const res = await client?.execute(req, params) + // console.log(res, req) + if (this.options?.debug) { + console.log('post patch result', res, req) + } + return (await this.read(read)).data[0] ?? null + } catch (e) { + console.log(e, req, params) + } + return null + } + + public async delete(obj: Implementation): Promise { + const parts = ['DELETE', 'FROM', this.table, 'WHERE'] + const params: ArrayOrObject = [] + + objectLoop(obj as {}, (value, key) => { + let allowedWheres = ([] as Array).concat(Array.isArray(this.id) ? this.id : [this.id]) + if (this.partitionKeys) { + allowedWheres.push(...this.partitionKeys ) + } + if (!allowedWheres.includes(key)) { + return + } + if (parts.length > 4) { + parts.push('AND') + } + parts.push(`${key}=?`) + params.push(value) + }) + + const client = await Client.get() + + if (this.options?.debug) { + console.log(parts, params) + } + + try { + await client!.execute(parts.join(' '), params) + } catch (e) { + console.error(e, parts, params) + throw e + } + return true + } + + private valueToDB(key: keyof T, value: any): string | number | boolean | Date { + const item = this.schema.model[key] as Item + const type = isSchemaItem(item) ? item.type : item + + if (typeof type === 'object' && !Array.isArray(type) && !(value instanceof Date)) { + return JSON.stringify(value) + } + + if (typeof value === 'undefined' || value === null) { + return value + } + + return value + } + + private dbToValue(key: keyof T, value: string | number | boolean | Date): any { + const item = this.schema.model[key] as Item + const type = isSchemaItem(item) ? 
item.type : item + + if (typeof type === 'object' && !Array.isArray(type) && !(value instanceof Date)) { + return JSON.parse(value as string) + } + + if (typeof value === 'undefined' || value === null) { + return value + } + + return value + } +} diff --git a/src/models/Dao.ts b/src/models/Adapters/DaoAdapter.ts similarity index 54% rename from src/models/Dao.ts rename to src/models/Adapters/DaoAdapter.ts index d222900..f618e45 100644 --- a/src/models/Dao.ts +++ b/src/models/Adapters/DaoAdapter.ts @@ -1,16 +1,46 @@ +import type Schema from 'libs/Schema' +import type { Model, ModelInfer, SchemaInfer } from 'libs/Schema' +import type { Query } from '../Query' + +export interface DBPull { + /** + * total number of rows that are valid with the specified query + */ + rows: number + /** + * total number of rows in the table + */ + rowsTotal: number + + /** + * current page number + */ + page: number + + /** + * total amount of pages + */ + pageTotal: number + + /** + * the data fetched + */ + data: Array> +} + /** - * the Dao is the object that connect the Database or source to the application layer + * the DaoAdapter is the object that connect the Database or source to the application layer * * you MUST call it through the `DaoFactory` file */ -export default abstract class Dao { +export default abstract class DaoAdapter { /** * insert a new object into the source * * @param obj the object to create * @returns the object with it's id filled if create or null otherwise */ - abstract create(obj: Omit): Promise + abstract create(obj: Partial>): Promise | null> /** * insert a new object into the source @@ -18,7 +48,7 @@ export default abstract class Dao { * @param obj the object to create * @returns the object with it's id filled if create or null otherwise */ - public insert: Dao['create'] = (obj: Parameters['create']>[0]) => + public insert: DaoAdapter>['create'] = (obj: Parameters>['create']>[0]) => this.create(obj) /** @@ -27,7 +57,7 @@ export default abstract class Dao { * @param query a partial object which filter depending on the elements, if not set it will fetch everything * @returns an array containing the list of elements that match with the query */ - abstract findAll(query?: Partial): Promise> + abstract read(query?: Query> | undefined): Promise>> /** * find the list of objects having elements from the query @@ -35,8 +65,17 @@ export default abstract class Dao { * @param query a partial object which filter depending on the elements, if not set it will fetch everything * @returns an array containing the list of elements that match with the query */ - public find: Dao['findAll'] = (query: Parameters['findAll']>[0]) => - this.findAll(query) + public findAll: DaoAdapter>['read'] = (query: Parameters>['read']>[0]) => + this.read(query) + + /** + * find the list of objects having elements from the query + * + * @param query a partial object which filter depending on the elements, if not set it will fetch everything + * @returns an array containing the list of elements that match with the query + */ + public find: DaoAdapter>['read'] = (query: Parameters>['read']>[0]) => + this.read(query) /** * find an object by it's id @@ -46,8 +85,8 @@ export default abstract class Dao { * @param id the id of the object * @returns */ - public findById(id: Object['id']): Promise { - return this.findOne({ id: id } as Partial) + public findById(id: ModelInfer['id']): Promise | null> { + return this.findOne({ id: id } as Partial>) } /** @@ -58,7 +97,7 @@ export default abstract class Dao { * @param id the id 
of the object * @returns */ - public get(id: Object['id']) { + public get(id: ModelInfer['id']) { return this.findById(id) } @@ -68,8 +107,8 @@ export default abstract class Dao { * @param query a partial object which filter depending on the elements, if not set it will fetch everything * @returns the first element matching with the query or null otherwise */ - public async findOne(query?: Partial): Promise { - return (await this.findAll(query))[0] ?? null + public async findOne(query?: Partial>): Promise | null> { + return (await this.findAll(query)).data[0] ?? null } /** @@ -80,14 +119,14 @@ export default abstract class Dao { * @param obj the object to update * @returns an object if it was able to update or null otherwise */ - abstract update(obj: Object): Promise + abstract update(obj: Partial>): Promise | null> /** * change some elements from the object and return the object updated * @param id the id of the object * @param changegs the change to make */ - public async patch(id: string, changes: Partial): Promise { + public async patch(id: ModelInfer['id'], changes: Partial>): Promise | null> { const query = await this.findById(id) if (!query) { return null @@ -100,8 +139,8 @@ export default abstract class Dao { * @returns the object is updated/inserted or null otherwise */ public async upsert( - object: Object | Omit - ): Promise { + object: Partial> + ): Promise | null> { if ('id' in object) { return this.update(object) } @@ -114,5 +153,5 @@ export default abstract class Dao { * * @returns if the object was deleted or not (if object is not in db it will return true) */ - abstract delete(obj: Object): Promise + abstract delete(obj: ModelInfer): Promise } diff --git a/src/models/Adapters/FSAdapter.ts b/src/models/Adapters/FSAdapter.ts new file mode 100644 index 0000000..84455de --- /dev/null +++ b/src/models/Adapters/FSAdapter.ts @@ -0,0 +1,221 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { objectLoop } from '@dzeio/object-util' +import archiver from 'archiver' +import type Schema from 'libs/Schema' +import type { Model, ModelInfer } from 'libs/Schema' +import type SchemaBuffer from 'libs/Schema/Items/SchemaBuffer' +import type SchemaNumber from 'libs/Schema/Items/SchemaNumber' +import type SchemaString from 'libs/Schema/Items/SchemaString' +import fileSystem from 'node:fs' +import fs from 'node:fs/promises' +import type { Query } from '../Query' +import type DaoAdapter from './DaoAdapter' +import type { DBPull } from './DaoAdapter' + +interface FS extends Model { + filename: SchemaString + path: SchemaString + // eslint-disable-next-line no-undef + data: SchemaBuffer + type: SchemaString + size: SchemaNumber +} + +export default class FSAdapter implements DaoAdapter> { + + public constructor( + public readonly schema: Schema, + public readonly basePath: string + ) { + if (basePath.endsWith('/')) { + console.warn('the base path should not end wiath a "/", removing it') + basePath = basePath.slice(0, basePath.lastIndexOf('/')) + } + } + + // TODO: make it clearer what it does + public async create(obj: Partial>): Promise | null> { + const realPath = this.getFullPath(obj.path!) 
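+ // illustrative example: with basePath '/srv/storage' and obj.path '/reports/2024.csv',
+ // realPath resolves to '/srv/storage/reports/2024.csv' and finalFolder (computed below)
+ // to '/srv/storage/reports'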
+ + const finalFolder = realPath.slice(0, realPath.lastIndexOf('/')) + + console.log('making the directory', finalFolder) + await fs.mkdir(finalFolder, { recursive: true }) + + if (obj.type === 'file') { + console.log('getting the data', finalFolder) + const data = obj.data + + console.log('writing to', realPath) + if ((data as any) instanceof Buffer) { + await fs.writeFile(realPath, data as Buffer) + } else { + await fs.writeFile(realPath, data as string) + } + return obj as ModelInfer + } + + console.log('making the final directory', realPath) + await fs.mkdir(realPath) + return obj as ModelInfer + } + + public async createZippedBufferFromDirectory(directoryPath: string) { + const archive = archiver('zip', {zlib: {level: 9}}) + archive.on('error', (err) => { + throw err + }) + archive.on('warning', (err) => { + if (err.code === 'ENOENT') { + console.log('warning: ', err) + } else { + throw err + } + }) + const fileName = `${this.basePath}/zip/${directoryPath.split(this.basePath)[1]}.zip` + fs.mkdir(fileName.slice(0, fileName.lastIndexOf('/')), {recursive: true}) + const output = fileSystem.createWriteStream(fileName) + archive.pipe(output) + archive.directory(directoryPath, false) + + const timeout = (cb: (value: (value: unknown) => void) => void, interval: number) => () => + new Promise((resolve) => { + setTimeout(() => cb(resolve), interval) + }) + const onTimeout = (seconds: number) => timeout((resolve) => + resolve(`Timed out while zipping ${directoryPath}`), seconds * 1000)() + const error = await Promise.race([archive.finalize(), onTimeout(60)]) + if (typeof error === 'string') { + console.log('Error:', error) + return null + } + return await fs.readFile(fileName) + } + + // eslint-disable-next-line complexity + public async read(query?: Query> | undefined): Promise>> { + + const localPath = query?.path as string ?? '' + + const realPath = this.getFullPath(localPath) + + console.log('get the full path', realPath) + + try { + const stats = await fs.stat(realPath) + + const files: Array> = [] + if (stats.isDirectory()) { + const dirFiles = await fs.readdir(realPath) + // eslint-disable-next-line max-depth + // if (toZip === true) { // put queried file/folder in a zip file + // const buffer = await this.createZippedBufferFromDirectory(realPath) + // // eslint-disable-next-line max-depth + // if (buffer !== null) { + // files.push({ + // path: localPath, + // filename: localPath.slice(localPath.lastIndexOf('/') + 1), + // data: buffer, + // type: 'file', + // size: buffer.length, + // } as ModelInfer) + // } + // } else { // return every sub files + for await (const file of dirFiles) { + files.push(await this.readFile(`${localPath}/${file}`)) + } + // } + } else { + files.push(await this.readFile(localPath)) + } + + const pageLimit = query?.$limit ?? Infinity + const pageOffset = query?.$offset ?? 0 + return { + rows: files.length, + rowsTotal: files.length, + page: Math.floor(pageOffset / pageLimit), + pageTotal: Math.max(1, Math.ceil(files.length / pageLimit)), + data: files.slice(pageOffset, pageOffset + pageLimit) + } + } catch { + return { + rows: 0, + rowsTotal: 0, + page: 0, + pageTotal: 0, + data: [] + } + } + } + + public async update(_obj: ModelInfer): Promise | null> { + throw new Error('not implemented') + } + + public async patch(_id: string, _obj: Partial>): Promise | null> { + throw new Error('not implemented') + + } + + public async delete(obj: ModelInfer): Promise { + const localPath = obj?.path as string ?? 
'' + const realPath = this.getFullPath(localPath) + + try { + const stats = await fs.stat(realPath) + if (!stats) { + return false + } + fs.rm(realPath, { recursive: true, force: true }) + return true + } catch { + console.error('Could not remove file', localPath) + return false + } + } + + private getFullPath(localPath?: string): string { + if (localPath && !localPath?.startsWith('/')) { + console.warn('Your path should start with a "/", adding it') + localPath = (`/${localPath}`) + } + + let realPath = this.basePath + (localPath ? localPath : '') + + if (realPath.includes('\\')) { + realPath = realPath.replace(/\\/g, '/') + } + + return realPath + } + + private async readFile(localPath: string): Promise> { + + const path = this.getFullPath(localPath) + console.log('reading file at', path) + const stats = await fs.stat(path) + const type = stats.isFile() ? 'file' : 'directory' + console.log('file is a', type) + + const obj: ModelInfer = { + path: localPath, + filename: localPath.slice(localPath.lastIndexOf('/') + 1), + data: type === 'file' ? await fs.readFile(path) : '', + type: type, + size: stats.size + } as any + + objectLoop(this.schema.model, (item, key) => { + if (item.attributes.includes('db:created')) { + // @ts-expect-error things get validated anyway + obj[key] = stats.ctime + } else if (item.attributes.includes('db:updated')) { + // @ts-expect-error things get validated anyway + obj[key] = stats.mtime + } + }) + + return obj + } +} diff --git a/src/models/Adapters/LDAPAdapter.ts b/src/models/Adapters/LDAPAdapter.ts new file mode 100644 index 0000000..f7ff9c4 --- /dev/null +++ b/src/models/Adapters/LDAPAdapter.ts @@ -0,0 +1,221 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { objectClone, objectLoop, objectMap, objectOmit, objectRemap } from '@dzeio/object-util' +import ldap from 'ldapjs' +import type Schema from 'libs/Schema' +import type { SchemaInfer } from 'libs/Schema' +import type DaoAdapter from 'models/Adapters/DaoAdapter' +import type { DBPull } from 'models/Adapters/DaoAdapter' +import type { Query } from 'models/Query' +import { filter } from './AdapterUtils' +type LDAPFields = 'uid' | 'mail' | 'givenname' | 'sn' | 'jpegphoto' | 'password' + +export default class LDAPAdapter implements DaoAdapter { + + private reverseReference: Partial> = {} + private attributes: Array = [] + + public constructor( + public readonly schema: T, + public readonly options: { + url: string + dnSuffix: string + admin: { + dn?: string | undefined + username?: string | undefined + password: string + } + fieldsCorrespondance?: Partial, LDAPFields | string>> + } + ) { + objectLoop(options.fieldsCorrespondance ?? {}, (value, key) => { + this.reverseReference[value] = key + this.attributes.push(value) + }) + } + + // TODO: make it clearer what it does + public async create(_obj: Partial>): Promise | null> { + throw new Error('not implemented') + } + + // eslint-disable-next-line complexity + public async read(query?: Query> | undefined): Promise> { + const passwordField = this.options.fieldsCorrespondance?.password ?? 'password' + const doLogin = !!query?.[passwordField] + + const emptyResult = { + rows: 0, + rowsTotal: 0, + page: 1, + pageTotal: 0, + data: [] + } + + if (!query) { + return emptyResult + } + + // console.log(await this.ldapFind({mail: 'f.bouillon@aptatio.com'})) + + const userdn = objectMap(query, (value, key) => `${(this.options.fieldsCorrespondance as any)[key] ?? 
key}=${value}`) + ?.filter((it) => it.slice(0, it.indexOf('=')) !== passwordField) + ?.join(',') + if (!doLogin) { + const bind = this.options.admin.dn ?? `cn=${this.options.admin.username},${this.options.dnSuffix}` + try { + const client = await this.bind(bind, this.options.admin.password) + // @ts-expect-error nique ta mere + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const results = (await this.ldapFind(client, objectMap(query, (value, key) => ({key: this.options.fieldsCorrespondance?.[key], value: value})) + )).map((it) => this.schema.parse( + objectRemap(it, (value, key) => ({key: this.reverseReference[key.toLowerCase() as string] as string, value: value})) + )).filter((it): it is SchemaInfer => !!it) + + const res = filter(query, results) + + return { + rows: res.filtered.length, + rowsTotal: results.length, + page: 1, + pageTotal: 1, + data: res.filtered + } + } catch { + return emptyResult + } + } + + // password authentication + try { + const clone = objectClone(query) + delete clone.password + + // find using admin privileges + const res = await this.read(clone) + const user = res.data[0] + if (!user) { + return emptyResult + } + const password = query.password as string ?? '' + const client = await this.bind(`uid=${user[this.reverseReference.uid as keyof typeof user]!},${this.options.dnSuffix}`, password) + // @ts-expect-error nique x2 + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const results = (await this.ldapFind(client, objectMap(clone, (value, key) => { + const finalKey = this.options.fieldsCorrespondance?.[key] + + return {key: finalKey, value: value} + }) + )).map((it) => this.schema.parse( + objectRemap(it, (value, key) => ({ key: this.reverseReference[key as string] as string, value: value })) + )).filter((it): it is SchemaInfer => !!it) + + const final = filter(objectOmit(query, 'password'), results) + // console.log(final, query, results) + + if (final.filtered.length !== 1) { + return emptyResult + } + + return { + rows: final.filtered.length, + rowsTotal: results.length, + page: 1, + pageTotal: 1, + data: final.filtered + } + + } catch (e) { + console.log('error, user not found', e) + return emptyResult + } + } + + public async update(_obj: Partial>): Promise | null> { + throw new Error('not implemented') + } + + public async patch(_id: string, _obj: Partial>): Promise | null> { + throw new Error('not implemented') + } + + public async delete(_obj: Partial>): Promise { + throw new Error('not implemented') + } + + private bind(dn: string, password: string): Promise { + const client = ldap.createClient({ + url: this.options.url + }) + return new Promise((res, rej) => { + console.log('binding as', dn) + client.on('connect', () => { + client.bind(dn, password, (err) => { + if (err) { + console.error('error binding as', dn, err) + client.unbind() + rej(err) + return + } + console.log('binded as', dn) + res(client) + }) + }) + .on('timeout', (err) => rej(err)) + .on('connectTimeout', (err) => rej(err)) + .on('error', (err) => rej(err)) + .on('connectError', (err) => rej(err)) + }) + } + + private async ldapFind(client: ldap.Client, filters: Array<{key: LDAPFields, value: string}>): Promise | undefined>>> { + + if (filters.length === 0) { + return [] + } + const firstFilter = filters.shift()! 
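+ // only the first filter is turned into the server-side search; e.g. an illustrative
+ // call with [{ key: 'uid', value: 'jdoe' }] searches the dnSuffix subtree (scope 'sub')
+ // for entries matching (uid=jdoe); any remaining filters are left for the caller to
+ // apply on the returned results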
+ return new Promise | undefined>>>((res, rej) => { + const users: Array | undefined>> = [] + const filter = { + attribute: firstFilter.key as any, + value: firstFilter.value, + } + console.log('Searching on LDAP') + client.search( + this.options.dnSuffix, { + filter: new ldap.EqualityFilter(filter), + // filter: `${filter.attribute}:caseExactMatch:=${filter.value}`, + scope: 'sub', + attributes: this.attributes + }, (err, search) => { + if (err) { + rej(err) + } + // console.log('search', search, err) + search.on('searchEntry', (entry) => { + users.push(this.parseUser(entry)) + }).on('error', (err2) => { + rej(err2) + client.unbind() + console.error('error in search lol', err2) + }).on('end', () => { + console.log(users) + res(users) + + client.unbind() + }) + } + ) + }) + } + + private parseUser(usr: ldap.SearchEntry): Record | undefined> { + const user: Record | undefined> = { dn: usr.objectName ?? undefined } + + for (const attribute of usr.attributes) { + user[attribute.type] = attribute.values.length === 1 ? attribute.values[0] : attribute.values + + } + + return user + } +} diff --git a/src/models/Adapters/MultiAdapter.ts b/src/models/Adapters/MultiAdapter.ts new file mode 100644 index 0000000..05dfa76 --- /dev/null +++ b/src/models/Adapters/MultiAdapter.ts @@ -0,0 +1,69 @@ +import type Schema from 'libs/Schema' +import type { SchemaInfer } from 'libs/Schema' +import type DaoAdapter from 'models/Adapters/DaoAdapter' + +export default class MultiAdapter implements DaoAdapter { + + public constructor( + public readonly schema: T, + public readonly adapters: Array<{ + adapter: DaoAdapter + fields: Array + /** + * a field from the main adapter that will backreference the child adapter + */ + childReference?: keyof T + }> = [] + ) {} + + // TODO: make it clearer what it does + public async create(obj: Partial>): Promise | null> { + let final: SchemaInfer = {} as any + // start by processing the childs + for (const adapter of this.adapters.sort((a) => a.childReference ? -1 : 1)) { + const partialObject: Partial> = {} + for (const key of adapter.fields) { + partialObject[key] = obj[key] + } + const res = await adapter.adapter.create!(partialObject as any) + if (res && adapter.childReference) { + obj[adapter.childReference] = res[adapter.childReference] + } + final = {...final, ...res} + } + return final + } + + // eslint-disable-next-line complexity + // public async read(query?: Query> | undefined): Promise> { + // let final: SchemaInfer = {} as any + // // start by processing the childs + // for (const adapter of this.adapters.sort((a) => a.childReference ? 
-1 : 1)) { + // const partialObject: Partial> = {} + // for (const key of adapter.fields) { + // partialObject[key] = obj[key] + // } + // const res = await adapter.adapter.read!(query) + // if (res && adapter.childReference) { + // obj[adapter.childReference] = res[adapter.childReference] + // } + // final = {...final, ...res} + // } + // // step 2 merge elements + // return final + // } + + public async update(_obj: Partial>): Promise | null> { + throw new Error('not implemented') + } + + public async patch(_id: string, _obj: Partial>): Promise | null> { + throw new Error('not implemented') + + } + + public async delete(_obj: Partial>): Promise { + throw new Error('not implemented') + + } +} diff --git a/src/models/Adapters/PostgresAdapter.ts b/src/models/Adapters/PostgresAdapter.ts new file mode 100644 index 0000000..e2ac21b --- /dev/null +++ b/src/models/Adapters/PostgresAdapter.ts @@ -0,0 +1,324 @@ +/* eslint-disable @typescript-eslint/no-non-null-assertion */ +import { objectFind, objectKeys, objectLoop, objectMap, objectRemap, objectValues } from '@dzeio/object-util' + +import type Schema from 'libs/Schema' +import type { SchemaInfer } from 'libs/Schema' +import type SchemaItem from 'libs/Schema/SchemaItem' +import crypto from 'node:crypto' +import PostgresClient from '../Clients/PostgresClient' +import { Sort, type Query } from '../Query' +import { filter } from './AdapterUtils' +import type { DBPull } from './DaoAdapter' +import DaoAdapter from './DaoAdapter' + +const specialKeywords = ['user', 'end'] as const + +export default class PostgresAdapter extends DaoAdapter { + + private id: Array = [] + + public constructor( + /** + * the schema used by Cassandra + */ + public readonly schema: T, + /** + * the table name + */ + public readonly table: string, + + /** + * additionnal options to make the adapter work + */ + private readonly options?: { + /** + * log the requests made to cassandra + */ + debug?: boolean + } + ) { + super() + objectLoop(this.schema.model, (schema, key) => { + if (schema.attributes.includes('db:auto')) { + this.id.push(key) + } + }) + } + + // TODO: make it clearer what it does + public async create(obj: Partial>): Promise | null> { + // handle automated values + objectLoop(this.schema.model, (item, key) => { + if (item.attributes.includes('db:created') || item.attributes.includes('db:updated')) { + // @ts-expect-error things get validated anyway + obj[key] = new Date() + } else if (item.attributes.includes('db:auto') && !obj[key]) { + if (item.isOfType('')) { + // @ts-expect-error things get validated anyway + obj[key] = crypto.randomBytes(16).toString('hex') + } else if (item.isOfType(123)) { + // @ts-expect-error things get validated anyway + obj[key] = crypto.randomBytes(16).readUint32BE() + } else { + throw new Error('cannot generate ID because it is not compatible with it') + } + } + }) + + // parse the data with the Schema + const { object: clone, error} = this.schema.validate(obj) + if (error) { + console.error(error) + throw new Error('Invalid data given to create the final object') + } + + // prepare the database query + const keys = objectKeys(clone) + .map((it) => { + if (specialKeywords.includes(it)) { // handle the special keyword + return `"${it}"` + } + return it + }) + const keysStr = keys.join(', ') + const values = keys.map((_, idx) => `$${idx+1}`).join(', ') + const req = `INSERT INTO ${this.table} (${keysStr}) VALUES (${values});` + const client = await PostgresClient.get() + + const params = objectMap(clone as any, (value, key) => 
this.valueToDB(key as any, value)) + + if (this.options?.debug) { + console.log(req, params) // 27 from 1 36 from 0 + } + + // send to the database + try { + await client.execute(req, params) + } catch (e) { + console.log(e, req, params) + return null + } + return this.schema.validate(clone).object ?? null + } + + // eslint-disable-next-line complexity + public async read(query?: Query> | undefined): Promise> { + // prepare the request to the database based on the query parameters + let req: Array = ['SELECT', '*', 'FROM', this.table] + + const client = await PostgresClient.get() + + if (this.options?.debug) { + console.log(req) + } + + // read from the database + let res: Array> + try { + res = await client.execute(`${req.join(' ')}`) + } catch (error) { + console.error('error running request') + console.error(req) + throw error + } + if (!res) { + return { + rows: 0, + pageTotal: 0, + page: 1, + rowsTotal: 0, + data: [] + } + } + + if (this.options?.debug) { + console.log('preEdits', res) + } + + // post-process the data from the database + const raw = res + .map((obj) => { + // remap to use system value instead of db values + obj = objectRemap(this.schema.model, (_, key) => ({ + key, + value: this.dbToValue(key as any, (obj as any)[key]) + })) + + // validate the schema + const res = this.schema.validate(obj) + if (res.object) { + return res.object + } + console.log(res.error) + return null + }) + .filter((it): it is SchemaInfer => !!it) + + // temp modification of comportement to use the new and better query system + if ((!query || !query?.$sort) && objectFind(this.schema.model, (_, key) => key === 'created')) { + // temp fix for the sorting algorithm + if (!query) { + // @ts-expect-error normal currently + query = { $sort: { created: Sort.DESC }} + } else { + query.$sort = { created: Sort.DESC } + } + } + let dataset = raw + + + if (this.options?.debug) { + console.log('preFilters', dataset) + } + + if (query) { + dataset = filter(query, dataset, this.options).filtered + } + return { + rows: dataset.length ?? 0, + rowsTotal: res.length ?? 0, + page: 1, + pageTotal: 1, + // page: page, + // pageTotal: pageLimit ? 
res.rowLength / pageLimit : 1, + data: dataset + } + } + + public async update(obj: SchemaInfer): Promise | null> { + return this.patch(obj) + } + + public async patch(id: Partial>): Promise | null> + public async patch(id: string, obj: Partial>): Promise | null> + // eslint-disable-next-line complexity + public async patch(id: string | Partial>, obj?: Partial>): Promise | null> { + + if (!obj) { + if (typeof id === 'string') { + return null + } + obj = {...id} as Partial> + } + + // const tmp = this.schema.validate(obj) + // // if (tmp.error) { + // // throw new Error(`obj invalid can\'t patch ${JSON.stringify(tmp.error)}`) + // // } + + // obj = tmp.object + + // update the updated time + objectLoop(this.schema.model, (item, key) => { + if (item.attributes.includes('db:updated')) { + // @ts-expect-error things get validated anyway + obj[key] = new Date() + } + }) + + // build the request parts + const parts: Array = ['UPDATE', this.table, 'SET'] + const params: Array = [] + + // remove ids + for (const tmp of this.id) { + // eslint-disable-next-line @typescript-eslint/no-dynamic-delete + delete obj[tmp] + } + + // map the items to update + const keys = objectMap(obj as {}, (_, key, idx) => { + if (specialKeywords.includes(key)) { + return `"${key}"=$${idx+1}` + } + + return `${key}=$${idx+1}` + }) + parts.push(keys.join(', ')) + params.push(...objectValues(obj as {})) + + // filter by the ids + parts.push('WHERE') + const read: Partial = {} + for (let idx = 0; idx < this.id.length; idx++) { + const key = this.id[idx] as string + + if (idx > 0) { + parts.push('AND') + } + parts.push(`${key}=$${params.length+1}`) + const value = obj[key] ?? (typeof id === 'string' ? id : id[key]) + read[key] = this.valueToDB(key as any, value) + if (!value) { + throw new Error(`Missing id (${key})`) + } + params.push(value) + } + + const req = parts.join(' ') + const client = await PostgresClient.get() + + if (this.options?.debug) { + console.log(req, params) + } + + try { + const res = await client!.execute(req, params) + // console.log(res, req) + if (this.options?.debug) { + console.log('post patch result', res, req) + } + return (await this.read(read)).data[0] ?? 
null + } catch (e) { + console.log(e, req, params) + } + return null + } + + public async delete(obj: SchemaInfer): Promise { + const parts = ['DELETE', 'FROM', this.table, 'WHERE'] + + objectLoop(obj as {}, (value, key, idx) => { + if (idx > 0) { + parts.push('AND') + } + parts.push(`${key}=${value}`) + }) + + const client = await PostgresClient.get() + + if (this.options?.debug) {} + + try { + await client!.execute(`${parts.join(' ')}`) + } catch (e) { + console.error(e, parts) + throw e + } + return true + } + + private valueToDB(key: keyof T, value: any): string | number | boolean | Date { + const item: SchemaItem = (this.schema.model as any)[key] + + if (item.isOfType({})) { + return JSON.stringify(value) + } + + return value + } + + private dbToValue(key: keyof T, value: string | number | boolean | Date): any { + const item: SchemaItem = (this.schema.model as any)[key] + + if (item.isOfType(543) && typeof value === 'string') { + return parseFloat(value) + } + + if (item.isOfType({}) && typeof value === 'string') { + return JSON.parse(value) + } + + return value + } +} diff --git a/src/models/Clients/CassandraClient.ts b/src/models/Clients/CassandraClient.ts new file mode 100644 index 0000000..906195a --- /dev/null +++ b/src/models/Clients/CassandraClient.ts @@ -0,0 +1,130 @@ +import { objectRemap } from '@dzeio/object-util' +import Cassandra from 'cassandra-driver' +import { getEnv, requireEnv } from 'libs/Env' +import Client from './Client' + +export default class CassandraClient extends Client { + + private static instance: CassandraClient | null = null + private client?: Cassandra.Client | null = null + + + public async getVersion(): Promise { + try { + await this.execute(`USE ${requireEnv('CASSANDRA_DATABASE')}`) + } catch (e) { + // database not found + console.log('database not found', e) + return -1 + } + try { + const res = await this.execute('SELECT value FROM settings WHERE id = \'db_version\'') + const value = res[0]?.value + if (value.includes('T')) { + return new Date(value).getTime() + } + return Number.parseInt(value) + } catch (e) { + // table does not exists + console.log('Settings table does not exists', e) + return -1 + } + } + + public override async setVersion(version: number): Promise { + await this.execute(` + UPDATE settings SET value = ? WHERE id = 'db_version'; + `.trim(), [version.toString()]) + } + + public async execute(query: string, params?: Array | object, options?: Cassandra.QueryOptions): Promise>> { + if (!this.client || this.client.getState().getConnectedHosts().length === 0) { + throw new Error('not connected to the database !') + } + + const res = await this.client.execute(query, params, options) + // if (query.includes('users')) + // console.log(res) + + + return res.rows?.map((it) => objectRemap(it.keys(), (key: string) => ({key: key, value: it.get(key)}))) ?? [] + } + + /** + * get the connexion to cassandra, it will try until it succedeed + */ + public static async get() { + const client = CassandraClient.instance ?? 
new CassandraClient() + CassandraClient.instance = client + return client + } + + /** + * connect to Cassandra + */ + // eslint-disable-next-line complexity + public async connect() { + if (await this.isReady()) { + return + } + + console.log('connecting to cassandra') + let authProvider: Cassandra.auth.AuthProvider|undefined + + const method = getEnv('CASSANDRA_AUTH_METHOD') + if (method) { + // eslint-disable-next-line max-depth + switch (method.toLowerCase()) { + case 'passwordauthenticator': + case 'plaintext': + authProvider = new Cassandra.auth.PlainTextAuthProvider( + requireEnv('CASSANDRA_USERNAME'), + requireEnv('CASSANDRA_PASSWORD') + ) + break + case 'dseplaintext': + authProvider = new Cassandra.auth.DsePlainTextAuthProvider( + requireEnv('CASSANDRA_USERNAME'), + requireEnv('CASSANDRA_PASSWORD'), + getEnv('CASSANDRA_AUTHORIZATION_ID') + ) + break + case 'none': + break + default: + console.error('Please use a valid CASSANDRA_AUTH_METHOD value (none|plaintext|dseplaintext)') + throw new Error('Please use a valid CASSANDRA_AUTH_METHOD value (none|plaintext|dseplaintext)') + } + } + + this.client = new Cassandra.Client({ + contactPoints: [requireEnv('CASSANDRA_CONTACT_POINT')], + authProvider: authProvider as Cassandra.auth.AuthProvider, + localDataCenter: getEnv('CASSANDRA_LOCAL_DATA_CENTER', 'datacenter1') + }) + // this.client.on('log', (level, loggerName, message, furtherInfo) => { + // console.log(`${level} - ${loggerName}: ${message}`); + // }) + + try { + await this.client.connect() + } catch (e) { + this.client = null + console.error(e) + throw new Error('Error connecting to Cassandra') + } + // try { + // await Migration.migrateToLatest() + // } catch (e) { + // this.migrated = -1 + // console.error(e) + // throw new Error('An error occured while migrating') + // } + // this.migrated = 1 + + } + + public async isReady(): Promise { + return !!this.client && this.client.getState().getConnectedHosts().length >= 1 + } +} diff --git a/src/models/Clients/Client.ts b/src/models/Clients/Client.ts new file mode 100644 index 0000000..6221ac4 --- /dev/null +++ b/src/models/Clients/Client.ts @@ -0,0 +1,137 @@ +import config from 'models/config' +import type MigrationObj from 'models/Migrations' + +export enum ConnectionStatus { + DISCONNECTED, + MIGRATING, + READY +} + +export interface ClientStatic { + get(): Promise +} + +export default abstract class Client { + + + public status: ConnectionStatus = ConnectionStatus.DISCONNECTED + + /** + * -1: unknown + * 0: migrating + * 1: migrated + */ + public migrationStatus = -1 + + /** + * get the current migration version + * + * -1 nothing/error + * 0+ current migration + */ + public abstract getVersion(): Promise + public abstract setVersion(version: number): Promise + + public abstract execute(query: string, params?: Array | object, ...options: Array): Promise>> + + public abstract connect(): Promise + + /** + * Migrate the database to the latest version + */ + public async migrateToLatest() { + const migrations = this.getMigrations() + const latest = migrations[migrations.length - 1] + if (!latest) { + return + } + return await this.migrateTo(latest.date) + } + + public getMigrations(): ReadonlyArray { + return config.migrations as ReadonlyArray + } + + /** + * migrate to a specific date in time + * @param date the date to try to migrate to + */ + public async migrateTo(date: number) { + this.migrationStatus = 0 + + let version = await this.getVersion() + + const migrations = this.getMigrations() + + const time = !version ? 
+		const time = !version ? -1 : version
+
+		console.log('Current DB version', version)
+		// same version, don't do anything
+		if (date === time) {
+			this.migrationStatus = 1
+			return
+		}
+		console.log('\x1b[35mCurrent DB version', version, '\x1b[0m')
+
+		// run up migrations
+		if (time < date) {
+			console.log('\x1b[35m', 'Migrating up to', date, '\x1b[0m')
+			const migrationsToRun = migrations.filter((it) => it.date > time && it.date <= date)
+			for (const migration of migrationsToRun) {
+				console.log('\x1b[35m', 'Migrating from', version, 'to', migration.date, '\x1b[0m')
+				await migration.up(this)
+				await this.setVersion(migration.date)
+				version = migration.date
+			}
+		} else { // run down migrations
+			console.log('\x1b[35m', 'Migrating down to', date, '\x1b[0m')
+			const migrationsToRun = migrations.filter((it) => it.date < time && it.date >= date)
+				.toReversed()
+			for (const migration of migrationsToRun) {
+				console.log('\x1b[35m', 'Migrating from', version, 'to', migration.date, '\x1b[0m')
+				await migration.down?.(this)
+				await this.setVersion(migration.date)
+				version = migration.date
+			}
+		}
+		console.log('\x1b[32mDone migrating\x1b[0m')
+		this.migrationStatus = 1
+	}
+
+	// public getStatus(): Promise
+
+	// public abstract isMigrated(): Promise
+
+	/**
+	 * indicate if the client is ready for new requests (not whether migrations are done)
+	 */
+	public abstract isReady(): Promise
+
+	/**
+	 * wait until every migration is done or fails
+	 */
+	public async waitForMigrations(): Promise {
+		if (this.migrationStatus === -1) {
+			await this.migrateToLatest()
+		}
+		while (!await this.isMigrated()) {
+			console.log('waiting...')
+			await new Promise((res) => setTimeout(res, 100))
+		}
+	}
+
+	public async isMigrated(): Promise {
+		return this.migrationStatus === 1
+		// if (this.migrationStatus < 1) {
+		// 	return false
+		// } else if (this.migrationStatus === 1) {
+		// 	return
+		// }
+		// const migrations = this.getMigrations()
+		// const last = migrations[migrations.length - 1]
+		// if (!last) {
+		// 	return true
+		// }
+		// return last.date === await this.getVersion()
+	}
+}
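Usage sketch (illustrative only, not part of the patch): how a concrete client is expected to drive the point-in-time migration API defined in Client.ts above; the client class chosen and the target date are arbitrary examples.

    import CassandraClient from 'models/Clients/CassandraClient'

    const client = await CassandraClient.get()
    await client.connect()
    // runs the up() of every migration newer than the current version up to the target
    // date, or the down() of every migration back to it when the target is older
    await client.migrateTo(Date.UTC(2024, 3, 26, 11, 55, 28))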
diff --git a/src/models/Clients/PostgresClient.ts b/src/models/Clients/PostgresClient.ts
new file mode 100644
index 0000000..f3d2a3d
--- /dev/null
+++ b/src/models/Clients/PostgresClient.ts
@@ -0,0 +1,71 @@
+import { wait } from 'libs/AsyncUtils'
+import { getEnv, requireEnv } from 'libs/Env'
+import pg from 'pg'
+import Client from '.'
+
+const Postgres = pg.Client
+
+// biome-ignore lint/complexity/noStaticOnlyClass:
+export default class PostgresClient extends Client {
+	private static instance: PostgresClient = new PostgresClient()
+	private client?: pg.Client | null
+	public override async getVersion(): Promise {
+		try {
+			const res = await this.execute(`SELECT value FROM settings WHERE id = 'db_version'`)
+
+			const value = res[0]?.value
+			if (!value) {
+				return -1
+			}
+			return Number.parseInt(value)
+		} catch (e) {
+			// table does not exist
+			console.log('Settings table does not exist', e)
+			return -1
+		}
+	}
+	public override async setVersion(version: number): Promise {
+		await this.execute(`UPDATE settings SET value = $1 WHERE id = 'db_version';`, [version.toString()])
+	}
+	public override async execute(query: string, params?: Array | object, ...options: Array): Promise>> {
+		if (!this.client || !await this.isReady()) {
+			throw new Error('not connected')
+		}
+		const res = await this.client.query>(query, params)
+		return res.rows
+	}
+	public override async connect(): Promise {
+		if (this.client) {
+			return
+		}
+		this.client = new Postgres({
+			host: requireEnv('POSTGRES_HOST'),
+			user: requireEnv('POSTGRES_USERNAME'),
+			password: requireEnv('POSTGRES_PASSWORD'),
+			port: parseInt(getEnv('POSTGRES_PORT', '5432')),
+			database: requireEnv('POSTGRES_DATABASE', 'projectmanager'),
+			// debug(connection, query, parameters, paramTypes) {
+			// 	console.log(`${query}, ${parameters}`);
+			// },
+		})
+			.on('end', () => {
+				this.client = null
+			})
+		try {
+			await this.client.connect()
+		} catch (e) {
+			this.client = null
+			console.error(e)
+			throw new Error('Error connecting to Postgres')
+		}
+	}
+	public override async isReady(): Promise {
+		return !!this.client
+	}
+
+	/**
+	 * get the connection to Postgres (returns the singleton instance)
+	 */
+	public static async get() {
+		return PostgresClient.instance
+	}
+}
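Usage sketch (illustrative only, not part of the patch): the reworked DaoFactory in the next hunk resolves DAOs from models/config.ts instead of a hard-coded switch; the 'session' key used here is the one registered in the config further down.

    import DaoFactory from 'models/DaoFactory'

    // the adapter registered under the 'session' key in config.models
    const sessions = DaoFactory.get('session')

    // the client driving the migration system (config.mainClient)
    const client = await DaoFactory.client()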
diff --git a/src/models/DaoFactory.ts b/src/models/DaoFactory.ts
index fbdce8f..1ebd607 100644
--- a/src/models/DaoFactory.ts
+++ b/src/models/DaoFactory.ts
@@ -1,24 +1,19 @@
-/**
- * TODO:
- * Add to `DaoItem` your model name
- * Add to the function `initDao` the Dao
- */
-
-/**
- * the different Daos that can be initialized
- *
- * Touch this interface to define which key is linked to which Dao
- */
-interface DaoItem {}
+import type { default as Dao, default as DaoAdapter } from './Adapters/DaoAdapter'
+import config from './config'
 
 /**
  * Class to get any DAO
  */
+
+// biome-ignore lint/complexity/noStaticOnlyClass:
 export default class DaoFactory {
 	/**
-	 * reference of the different Daos for a correct singleton implementation
+	 * get the total list of daos available
+	 * @returns the list of daos available
 	 */
-	private static daos: Partial = {}
+	public static getAll(): Record {
+		return config.models
+	}
 
 	/**
 	 * Get a dao by its key
@@ -28,27 +23,15 @@ export default class DaoFactory {
 	 * @param key the dao key to get
 	 * @returns the Dao you want as a singleton
 	 */
-	public static get(key: Key): DaoItem[Key] {
-		if (!(key in this.daos)) {
-			const dao = this.initDao(key)
-			if (!dao) {
-				throw new Error(`${key} has no valid Dao`)
-			}
-			this.daos[key] = dao as DaoItem[Key]
-		}
-		return this.daos[key] as DaoItem[Key]
+	public static get(key: Key): typeof config['models'][Key] {
+		return config.models[key]
 	}
 
 	/**
-	 * init a dao by its key, it does not care if it exists or not
-	 *
-	 * @param item the element to init
-	 * @returns a new initialized dao or undefined if no dao is linked
+	 * get the main client linked to migrations
+	 * @returns the main client
 	 */
-	private static initDao(item: keyof DaoItem): any | undefined {
-		switch (item) {
-			default:
-				return undefined
-		}
+	public static async client(): ReturnType<(typeof config.mainClient)['get']> {
+		return config.mainClient.get()
+	}
 }
diff --git a/src/models/Migrations/Example.ts b/src/models/Migrations/Example.ts
new file mode 100644
index 0000000..01075b9
--- /dev/null
+++ b/src/models/Migrations/Example.ts
@@ -0,0 +1,35 @@
+import type Client from 'models/Clients/Client'
+import type Migration from './Migration'
+
+/**
+ * A system migration
+ * the file then needs to be imported in the `models/config.ts` file
+ */
+export default {
+	/** SET THE MIGRATION DATE (UTC) HERE */
+	date: Date.UTC(2024, 3, 26, 11, 55, 28),
+	async up(client: Client): Promise {
+		const requests: Array = [
+
+		]
+
+		for await (const request of requests) {
+			await client.execute(request)
+		}
+
+		return true
+	},
+	async down(client: Client) {
+		const requests: Array = [
+
+		]
+
+		for await (const request of requests) {
+			try {
+				await client.execute(request)
+			} catch {}
+		}
+
+		return true
+	},
+} as Migration
diff --git a/src/models/Migrations/Migration.d.ts b/src/models/Migrations/Migration.d.ts
new file mode 100644
index 0000000..2006448
--- /dev/null
+++ b/src/models/Migrations/Migration.d.ts
@@ -0,0 +1,10 @@
+import type Client from 'models/Clients/Client'
+
+export default interface Migration {
+	/**
+	 * timestamp in UTC
+	 */
+	date: number
+	up(client: Client): Promise
+	down?(client: Client): Promise
+}
diff --git a/src/models/Query.ts b/src/models/Query.ts
new file mode 100644
index 0000000..4bebfe5
--- /dev/null
+++ b/src/models/Query.ts
@@ -0,0 +1,149 @@
+interface QueryRootFilters> {
+	/**
+	 * at least one of the results must be true
+	 */
+	$or?: Array>
+	/**
+	 * every result must be false
+	 */
+	$nor?: Array>
+	/**
+	 * (default) every sub-query must return true
+	 */
+	$and?: Array>
+	/**
+	 * at least one result must be false
+	 */
+	$nand?: Array>
+	/**
+	 * inverts the result of the following query
+	 */
+	$not?: QueryList
+	/**
+	 * define a precise offset into the fetched data
+	 */
+	$offset?: number
+	/**
+	 * limit the number of elements returned from the dataset
+	 */
+	$limit?: number
+	/**
+	 * sort the data with each key being prioritized in order
+	 *
+	 * ex:
+	 * {a: Sort.DESC, b: Sort.ASC}
+	 *
+	 * will sort first by a and, if equal, will sort by b
+	 */
+	$sort?: SortInterface
+}
+
+/**
+ * Logical operators that can be used to filter data
+ */
+export type QueryLogicalOperator = {
+	/**
+	 * at least one of the results must be true
+	 */
+	$or: Array>
+} | {
+	/**
+	 * every result must be false
+	 */
+	$nor: Array>
+} | {
+	/**
+	 * at least one result must be false
+	 */
+	$nand: Array>
+} | {
+	/**
+	 * (default) every sub-query must return true
+	 */
+	$and: Array>
+} | {
+	/**
+	 * inverts the result of the following query
+	 */
+	$not: QueryValues
+}
+
+/**
+ * the different comparison operators that can be used to filter data
+ */
+export type QueryComparisonOperator = {
+	/**
+	 * the remote source value must be strictly equal to the proposed value
+	 */
+	$eq: Value | null
+} | {
+	/**
+	 * the remote source value must be greater than the proposed value
+	 */
+	$gt: number | Date
+} | {
+	/**
+	 * the remote source value must be less than the proposed value
+	 */
+	$lt: number | Date
+} | {
+	/**
+	 * the remote source value must be greater than or equal to the proposed value
	 */
+	$gte: number | Date
+} | {
+	/**
+	 * the remote source value must be less than or equal to the proposed value
+	 */
+	$lte: number | Date
+} | {
+	/**
+	 * the remote source value must be one of the proposed values
+	 */
+	$in: Array
+} | {
+	/**
+	 * (for strings only) the proposed value must be contained in the remote source value
+	 */
+	$inc: Value | null
+}
+
+export type QueryList> = {
+	[Key in keyof Obj]?: QueryValues
+}
+
+/**
+ * The different values the element can take
+ * if null, it checks that the remote value is NULL
+ * if an array, it checks oneOf
+ * if a RegExp, it checks that the regexp matches
+ */
+export type QueryValues = Value |
+	null |
+	Array |
+	RegExp |
+	QueryComparisonOperator |
+	QueryLogicalOperator
+
+/**
+ * The query element that allows you to query different elements
+ */
+export type Query> = QueryList & QueryRootFilters
+
+/**
+ * sorting interface with priority
+ */
+export type SortInterface> = {
+	[Key in keyof Obj]?: Sort
+}
+
+export enum Sort {
+	/**
+	 * Sort the values from the lowest to the largest
+	 */
+	ASC,
+	/**
+	 * Sort the values from the largest to the lowest
+	 */
+	DESC
+}
diff --git a/src/models/config.ts b/src/models/config.ts
new file mode 100644
index 0000000..a60650d
--- /dev/null
+++ b/src/models/config.ts
@@ -0,0 +1,48 @@
+import Schema from 'libs/Schema'
+import type Dao from './Adapters/DaoAdapter'
+import PostgresAdapter from './Adapters/PostgresAdapter'
+import CassandraClient from './Clients/CassandraClient'
+import type { ClientStatic } from './Clients/Client'
+import type Migration from './Migrations/Migration'
+
+// @ts-ignore
+interface Config {
+	/**
+	 * the main client is responsible for the Migration system
+	 */
+	mainClient: ClientStatic
+
+	/**
+	 * define every model of the application
+	 */
+	models: Record
+
+	/**
+	 * Define the application migrations
+	 */
+	migrations: Array
+}
+
+const config = {
+	/**
+	 * the main client is responsible for the Migration system
+	 */
+	mainClient: CassandraClient as ClientStatic,
+
+	/**
+	 * define every model of the application
+	 */
+	models: {
+		session: new PostgresAdapter(new Schema({}), 'pouet')
+		// session: new Dao(Session, new CassandraAdapter(Session, 'Session', 'id')),
+	},
+
+	/**
+	 * Define the application migrations
+	 */
+	migrations: [
+		// Migration20240326115528
+	]
+} as const
+
+export default config
diff --git a/vitest.config.ts b/vitest.config.ts
index 9fee874..a762a8a 100644
--- a/vitest.config.ts
+++ b/vitest.config.ts
@@ -1,10 +1,9 @@
 ///
 import { getViteConfig } from 'astro/config'
-// import { configDefaults } from 'vitest/config'
 
 export default getViteConfig({
 	test: {
-		include: ['./tests/**.ts']
+		include: ['./tests/**/*.ts']
 		/* for example, use global to avoid globals imports (describe, test, expect): */
 		// globals: true,
 	}
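Usage sketch (illustrative only, not part of the patch): what a query built against src/models/Query.ts could look like. The User shape is hypothetical and the generic parameter of Query is an assumption inferred from how QueryList and QueryRootFilters are used; the adapters are what actually interpret the query.

    import { Sort, type Query } from 'models/Query'

    // hypothetical model, only for this example
    interface User {
        name: string
        age: number
    }

    // name matches /^jo/i, age >= 18, lowest age first, at most 10 results
    const query: Query<User> = {
        name: /^jo/i,
        age: { $gte: 18 },
        $sort: { age: Sort.ASC },
        $limit: 10
    }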