BREAKING!: update libraries

monoid 2024-10-06 00:32:05 +09:00
parent 4669e1c0d4
commit 84c0f7243e
44 changed files with 4352 additions and 3674 deletions

1
.npmrc Normal file
View File

@ -0,0 +1 @@
link-workspace-packages=true

View File

@ -1,5 +1,6 @@
import { atom, useAtomValue, setAtomValue } from "../lib/atom.ts";
import { makeApiUrl } from "../hook/fetcher.ts";
import { LoginRequest } from "dbtype/mod.ts";
type LoginLocalStorage = {
username: string;
@ -77,10 +78,7 @@ export const doLogout = async () => {
};
}
};
export const doLogin = async (userLoginInfo: {
username: string;
password: string;
}): Promise<string | LoginLocalStorage> => {
export const doLogin = async (userLoginInfo: LoginRequest): Promise<string | LoginLocalStorage> => {
const u = makeApiUrl("/api/user/login");
const res = await fetch(u, {
method: "POST",
@ -99,6 +97,20 @@ export const doLogin = async (userLoginInfo: {
return b;
};
export const doResetPassword = async (username: string, oldpassword: string, newpassword: string) => {
const u = makeApiUrl("/api/user/reset");
const res = await fetch(u, {
method: "POST",
body: JSON.stringify({ username, oldpassword, newpassword }),
headers: { "content-type": "application/json" },
credentials: "include",
});
const b = await res.json();
if (res.status !== 200) {
return b.detail as string;
}
return b;
}
export async function getInitialValue() {
const user = getUserSessions();

View File

@ -1,53 +0,0 @@
import type { JSONMap } from './jsonmap';
export interface DocumentBody {
title: string;
content_type: string;
basepath: string;
filename: string;
modified_at: number;
content_hash: string | null;
additional: JSONMap;
tags: string[]; // eager loading
}
export interface Document extends DocumentBody {
readonly id: number;
readonly created_at: number;
readonly deleted_at: number | null;
}
export type QueryListOption = {
/**
* search word
*/
word?: string;
allow_tag?: string[];
/**
* limit of list
* @default 20
*/
limit?: number;
/**
* use offset if true, otherwise use cursor
* @default false
*/
use_offset?: boolean;
/**
* cursor of documents
*/
cursor?: number;
/**
* offset of documents
*/
offset?: number;
/**
* tag eager loading
* @default true
*/
eager_loading?: boolean;
/**
* content type
*/
content_type?: string;
};

2
packages/dbtype/mod.ts Normal file
View File

@ -0,0 +1,2 @@
export * from "./src/api.ts";
export * as db from "./src/types.ts";
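
The new barrel module exposes the zod-based API types at the package root and the kysely-generated row types under the db namespace. Later files in this commit consume it roughly as in the following sketch (the alias names are illustrative):

import type { Document, db } from "dbtype";

// API-level document shape from api.ts
type ApiDocument = Document;
// raw database row shape generated by kysely-codegen
type DocumentRow = db.Document;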

View File

@ -2,7 +2,7 @@
"name": "dbtype",
"version": "1.0.0",
"description": "",
"main": "index.js",
"main": "mod.ts",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
@ -13,6 +13,11 @@
"@types/better-sqlite3": "^7.6.9",
"better-sqlite3": "^9.4.3",
"kysely": "^0.27.3",
"kysely-codegen": "^0.14.1"
"kysely-codegen": "^0.14.1",
"typescript": "^5.4.3"
},
"type": "module",
"dependencies": {
"zod": "^3.23.8"
}
}

102
packages/dbtype/src/api.ts Normal file
View File

@ -0,0 +1,102 @@
import { z } from "zod";
export { ZodError } from "zod";
export const DocumentBodySchema = z.object({
title: z.string(),
content_type: z.string(),
basepath: z.string(),
filename: z.string(),
modified_at: z.number(),
content_hash: z.string(),
additional: z.record(z.unknown()),
tags: z.array(z.string()),
pagenum: z.number().int(),
});
export type DocumentBody = z.infer<typeof DocumentBodySchema>;
export const DocumentSchema = DocumentBodySchema.extend({
id: z.number(),
created_at: z.number(),
deleted_at: z.number().nullable(),
});
export type Document = z.infer<typeof DocumentSchema>;
export const TagSchema = z.object({
name: z.string(),
description: z.string().nullable(),
});
export type Tag = z.infer<typeof TagSchema>;
export const TagRelationSchema = z.object({
doc_id: z.number(),
tag_name: z.string(),
});
export type TagRelation = z.infer<typeof TagRelationSchema>;
export const PermissionSchema = z.object({
username: z.string(),
name: z.string(),
});
export type Permission = z.infer<typeof PermissionSchema>;
export const UserSchema = z.object({
username: z.string(),
password_hash: z.string(),
password_salt: z.string(),
});
export type User = z.infer<typeof UserSchema>;
export const SchemaMigrationSchema = z.object({
version: z.string().nullable(),
dirty: z.boolean(),
});
export type SchemaMigration = z.infer<typeof SchemaMigrationSchema>;
const WorkStatusEnum = z.enum(["pending", "done", "error"]);
export const WorkSchema = z.object({
uuid: z.string(),
type: z.literal("rehash"),
status: WorkStatusEnum,
detail: z.string(),
});
export type Work = z.infer<typeof WorkSchema>;
export const QueryListOptionSchema = z.object({
/**
* search word
*/
word: z.string().optional(),
allow_tag: z.array(z.string()).default([]),
limit: z.number().default(20),
use_offset: z.boolean().default(false),
cursor: z.number().optional(),
offset: z.number().optional(),
eager_loading: z.boolean().default(true),
content_type: z.string().optional(),
});
export type QueryListOption = z.infer<typeof QueryListOptionSchema>;
export const LoginRequestSchema = z.object({
username: z.string(),
password: z.string(),
});
export type LoginRequest = z.infer<typeof LoginRequestSchema>;
export const LoginResetRequestSchema = z.object({
username: z.string(),
oldpassword: z.string(),
newpassword: z.string(),
});
export type LoginResetRequest = z.infer<typeof LoginResetRequestSchema>;
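
Because these schemas are runtime validators as well as type sources, route handlers in this commit parse untrusted request bodies directly and let a thrown ZodError surface as a 400 through the error handler. A minimal sketch of that pattern (the body object is made up):

import { LoginRequestSchema, ZodError } from "dbtype";

// hypothetical client input; parse() throws ZodError when it does not match the schema
const body: unknown = { username: "admin", password: "hunter2" };
try {
    const login = LoginRequestSchema.parse(body); // typed as LoginRequest on success
    console.log(login.username);
} catch (err) {
    if (err instanceof ZodError) {
        console.error(err.errors.map((issue) => issue.message).join(", "));
    } else {
        throw err;
    }
}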

View File

@ -12,7 +12,7 @@ export interface DocTagRelation {
export interface Document {
additional: string | null;
basepath: string;
content_hash: string | null;
content_hash: string;
content_type: string;
created_at: number;
deleted_at: number | null;
@ -20,6 +20,7 @@ export interface Document {
id: Generated<number>;
modified_at: number;
title: string;
pagenum: number;
}
export interface Permissions {

View File

@ -0,0 +1,110 @@
{
"compilerOptions": {
/* Visit https://aka.ms/tsconfig to read more about this file */
/* Projects */
// "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
// "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
/* Language and Environment */
"target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
// "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
// "jsx": "preserve", /* Specify what JSX code is generated. */
// "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
// "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
// "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
// "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
/* Modules */
"module": "NodeNext", /* Specify what module code is generated. */
// "rootDir": "./", /* Specify the root folder within your source files. */
// "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
// "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
// "types": [], /* Specify type package names to be included without being referenced in a source file. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
// "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
// "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
// "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
// "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
// "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
// "resolveJsonModule": true, /* Enable importing .json files. */
// "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
// "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
/* JavaScript Support */
// "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
// "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
"allowImportingTsExtensions": true, /* Allow importing TypeScript files with a '.ts' extension. */
/* Emit */
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
// "sourceMap": true, /* Create source map files for emitted JavaScript files. */
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
// "outDir": "./dist", /* Specify an output folder for all emitted files. */
// "removeComments": true, /* Disable emitting comments. */
"noEmit": true, /* Disable emitting files from a compilation. */
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
// "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
// "newLine": "crlf", /* Set the newline character for emitting files. */
// "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
// "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
// "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
// "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
/* Interop Constraints */
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
// "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
// "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
"forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
/* Type Checking */
"strict": true, /* Enable all strict type-checking options. */
// "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
// "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
// "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
// "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
// "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
// "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
/* Completeness */
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
"skipLibCheck": true /* Skip type checking all .d.ts files. */
}
}

View File

@ -3,10 +3,10 @@
"version": "1.0.0",
"description": "",
"main": "build/app.js",
"type": "module",
"scripts": {
"compile": "swc src --out-dir compile",
"dev": "nodemon -r @swc-node/register --enable-source-maps --exec node app.ts",
"start": "node compile/src/app.js"
"dev": "tsx watch src/app.ts",
"start": "tsx src/app.ts"
},
"author": "",
"license": "ISC",
@ -14,8 +14,9 @@
"@zip.js/zip.js": "^2.7.40",
"better-sqlite3": "^9.4.3",
"chokidar": "^3.6.0",
"dbtype": "workspace:dbtype",
"dotenv": "^16.4.5",
"jsonwebtoken": "^8.5.1",
"jose": "^5.9.3",
"koa": "^2.15.2",
"koa-bodyparser": "^4.4.1",
"koa-compose": "^4.1.0",
@ -25,9 +26,6 @@
"tiny-async-pool": "^1.3.0"
},
"devDependencies": {
"@swc-node/register": "^1.9.0",
"@swc/cli": "^0.3.10",
"@swc/core": "^1.4.11",
"@types/better-sqlite3": "^7.6.9",
"@types/jsonwebtoken": "^8.5.9",
"@types/koa": "^2.15.0",
@ -36,7 +34,8 @@
"@types/koa-router": "^7.4.8",
"@types/node": ">=20.0.0",
"@types/tiny-async-pool": "^1.0.5",
"dbtype": "workspace:^",
"nodemon": "^3.1.0"
"nodemon": "^3.1.0",
"tsx": "^4.19.1",
"typescript": "^5.2.2"
}
}

View File

@ -1,4 +1,4 @@
import { create_server } from "./src/server";
import { create_server } from "./server.ts";
create_server().then((server) => {
server.start_server();

View File

@ -1,22 +0,0 @@
import type { Knex as k } from "knex";
export namespace Knex {
export const config: {
development: k.Config;
production: k.Config;
} = {
development: {
client: "sqlite3",
connection: {
filename: "./devdb.sqlite3",
},
debug: true,
},
production: {
client: "sqlite3",
connection: {
filename: "./db.sqlite3",
},
},
};
}

View File

@ -1,7 +1,7 @@
import { extname } from "node:path";
import type { DocumentBody } from "dbtype/api";
import { readZip } from "../util/zipwrap";
import { type ContentConstructOption, createDefaultClass, registerContentReferrer } from "./file";
import type { DocumentBody } from "dbtype";
import { readZip } from "../util/zipwrap.ts";
import { type ContentConstructOption, createDefaultClass, registerContentReferrer } from "./file.ts";
import { TextWriter } from "@zip.js/zip.js";
type ComicType = "doujinshi" | "artist cg" | "manga" | "western";
@ -63,6 +63,7 @@ export class ComicReferrer extends createDefaultClass("comic") {
additional: {
page: this.pagenum,
},
pagenum: this.pagenum,
tags: tags,
};
}

View File

@ -1,7 +1,7 @@
import { createHash } from "node:crypto";
import { promises, type Stats } from "node:fs";
import path, { extname } from "node:path";
import type { DocumentBody } from "dbtype/api";
import type { DocumentBody } from "dbtype";
/**
* content file or directory referrer
*/
@ -11,6 +11,7 @@ export interface ContentFile {
readonly path: string;
readonly type: string;
}
export type ContentConstructOption = {
hash: string;
};
@ -50,6 +51,7 @@ export const createDefaultClass = (type: string): ContentFileConstructor => {
tags: [],
content_hash: await this.getHash(),
modified_at: await this.getMtime(),
pagenum: 0,
} as DocumentBody;
return ret;
}

View File

@ -1,3 +1,4 @@
import "./comic";
import "./video";
export { ContentFile, createContentFile } from "./file";
export { createContentFile } from "./file.ts";
export type { ContentFile } from "./file.ts";

View File

@ -1,5 +1,5 @@
import { registerContentReferrer } from "./file";
import { createDefaultClass } from "./file";
import { registerContentReferrer } from "./file.ts";
import { createDefaultClass } from "./file.ts";
export class VideoReferrer extends createDefaultClass("video") {
}

View File

@ -1,13 +1,16 @@
import { getKysely } from "./kysely";
import { getKysely } from "./kysely.ts";
import { jsonArrayFrom } from "kysely/helpers/sqlite";
import type { DocumentAccessor } from "../model/doc";
import type { DocumentAccessor } from "../model/doc.ts";
import type {
Document,
QueryListOption,
db,
DocumentBody
} from "dbtype/api";
} from "dbtype";
import type { NotNull } from "kysely";
import { MyParseJSONResultsPlugin } from "./plugin";
import { MyParseJSONResultsPlugin } from "./plugin.ts";
type DBDocument = db.Document;
export type DBTagContentRelation = {
doc_id: number;

View File

@ -1,6 +1,8 @@
import { Kysely, ParseJSONResultsPlugin, SqliteDialect } from "kysely";
import SqliteDatabase from "better-sqlite3";
import type { DB } from "dbtype/types";
import type { db } from "dbtype";
type DB = db.DB;
export function createSqliteDialect() {
const url = process.env.DATABASE_URL;

View File

@ -1,3 +1,3 @@
export * from "./doc";
export * from "./tag";
export * from "./user";
export * from "./doc.ts";
export * from "./tag.ts";
export * from "./user.ts";

View File

@ -1,7 +1,6 @@
import { getKysely } from "./kysely";
import { jsonArrayFrom } from "kysely/helpers/sqlite";
import type { Tag, TagAccessor, TagCount } from "../model/tag";
import type { DBTagContentRelation } from "./doc";
import { getKysely } from "./kysely.ts";
import type { Tag, TagAccessor, TagCount } from "../model/tag.ts";
class SqliteTagAccessor implements TagAccessor {

View File

@ -1,5 +1,5 @@
import { getKysely } from "./kysely";
import { type IUser, Password, type UserAccessor, type UserCreateInput } from "../model/user";
import { getKysely } from "./kysely.ts";
import { type IUser, Password, type UserAccessor, type UserCreateInput } from "../model/user.ts";
class SqliteUser implements IUser {
readonly username: string;

View File

@ -1,8 +1,8 @@
import { basename, dirname, join as pathjoin } from "node:path";
import { ContentFile, createContentFile } from "../content/mod";
import type { Document, DocumentAccessor } from "../model/mod";
import { ContentList } from "./content_list";
import type { IDiffWatcher } from "./watcher";
import { basename, dirname } from "node:path";
import { createContentFile } from "../content/mod.ts";
import type { Document, DocumentAccessor } from "../model/mod.ts";
import { ContentList } from "./content_list.ts";
import type { IDiffWatcher } from "./watcher.ts";
// refactoring needed.
export class ContentDiffHandler {
@ -22,6 +22,11 @@ export class ContentDiffHandler {
async setup() {
const deleted = await this.doc_cntr.findDeleted(this.content_type);
for (const it of deleted) {
if (it.deleted_at === null) {
// This should never happen; if it does, skip the entry.
console.error("It should not happen");
continue;
}
this.tombstone.set(it.content_hash, it);
}
}
@ -70,7 +75,7 @@ export class ContentDiffHandler {
id: dbc[0].id,
deleted_at: Date.now(),
});
this.tombstone.set(dbc[0].content_hash, dbc[0]);
this.tombstone.set(content_hash, dbc[0]);
}
private async OnCreated(cpath: string) {
const basepath = dirname(cpath);

View File

@ -1,2 +1,2 @@
export * from "./diff";
export * from "./router";
export * from "./diff.ts";
export * from "./router.ts";

View File

@ -1,4 +1,4 @@
import { ConfigManager } from "../../util/configRW";
import { ConfigManager } from "../../util/configRW.ts";
import ComicSchema from "./ComicConfig.schema.json";
export interface ComicConfig {
watch: string[];

View File

@ -1,7 +1,7 @@
import { ComicConfig } from "./ComicConfig";
import { WatcherCompositer } from "./compositer";
import { RecursiveWatcher } from "./recursive_watcher";
import { WatcherFilter } from "./watcher_filter";
import { ComicConfig } from "./ComicConfig.ts";
import { WatcherCompositer } from "./compositer.ts";
import { RecursiveWatcher } from "./recursive_watcher.ts";
import { WatcherFilter } from "./watcher_filter.ts";
const createComicWatcherBase = (path: string) => {
return new WatcherFilter(new RecursiveWatcher(path), (x) => x.endsWith(".zip"));

View File

@ -1,9 +1,9 @@
import event from "node:events";
import { type FSWatcher, promises, watch } from "node:fs";
import { join } from "node:path";
import type { DocumentAccessor } from "../../model/doc";
import type { DiffWatcherEvent, IDiffWatcher } from "../watcher";
import { setupHelp } from "./util";
import type { DocumentAccessor } from "../../model/doc.ts";
import type { DiffWatcherEvent, IDiffWatcher } from "../watcher.ts";
import { setupHelp } from "./util.ts";
const { readdir } = promises;
@ -20,7 +20,10 @@ export class CommonDiffWatcher extends event.EventEmitter implements IDiffWatche
constructor(path: string) {
super();
this._path = path;
this._watcher = watch(this._path, { persistent: true, recursive: false }, async (eventType, filename) => {
this._watcher = watch(this._path, { persistent: true, recursive: false },
async (eventType, filename) => {
// if filename is null, the watched path no longer exists
if (filename === null) return;
if (eventType === "rename") {
const cur = await readdir(this._path);
// add

View File

@ -1,6 +1,6 @@
import { EventEmitter } from "node:events";
import type { DocumentAccessor } from "../../model/doc";
import { type DiffWatcherEvent, type IDiffWatcher, linkWatcher } from "../watcher";
import type { DocumentAccessor } from "../../model/doc.ts";
import { type DiffWatcherEvent, type IDiffWatcher, linkWatcher } from "../watcher.ts";
export class WatcherCompositer extends EventEmitter implements IDiffWatcher {
refWatchers: IDiffWatcher[];

View File

@ -1,8 +1,8 @@
import { type FSWatcher, watch } from "chokidar";
import { EventEmitter } from "node:events";
import type { DocumentAccessor } from "../../model/doc";
import type { DiffWatcherEvent, IDiffWatcher } from "../watcher";
import { setupRecursive } from "./util";
import type { DocumentAccessor } from "../../model/doc.ts";
import type { DiffWatcherEvent, IDiffWatcher } from "../watcher.ts";
import { setupRecursive } from "./util.ts";
type RecursiveWatcherOption = {
/** @default true */

View File

@ -1,8 +1,8 @@
import { promises } from "node:fs";
import { join } from "node:path";
const { readdir } = promises;
import type { DocumentAccessor } from "../../model/doc";
import type { IDiffWatcher } from "../watcher";
import type { DocumentAccessor } from "../../model/doc.ts";
import type { IDiffWatcher } from "../watcher.ts";
function setupCommon(watcher: IDiffWatcher, basepath: string, initial_filenames: string[], cur: string[]) {
// TODO: reduce O(nm) to O(n+m) using a hash map.

View File

@ -1,6 +1,6 @@
import { EventEmitter } from "node:events";
import type { DocumentAccessor } from "../../model/doc";
import { type DiffWatcherEvent, type IDiffWatcher, linkWatcher } from "../watcher";
import type { DocumentAccessor } from "../../model/doc.ts";
import { type DiffWatcherEvent, type IDiffWatcher, linkWatcher } from "../watcher.ts";
export class WatcherFilter extends EventEmitter implements IDiffWatcher {
refWatcher: IDiffWatcher;

View File

@ -1,9 +1,10 @@
import { sign, TokenExpiredError, verify } from "jsonwebtoken";
import { SignJWT, jwtVerify, errors } from "jose";
import type Koa from "koa";
import Router from "koa-router";
import type { IUser, UserAccessor } from "./model/mod";
import { sendError } from "./route/error_handler";
import { get_setting } from "./SettingConfig";
import type { IUser, UserAccessor } from "./model/mod.ts";
import { sendError } from "./route/error_handler.ts";
import { get_setting } from "./SettingConfig.ts";
import { LoginRequestSchema, LoginResetRequestSchema } from "dbtype";
type PayloadInfo = {
username: string;
@ -18,40 +19,50 @@ const isUserState = (obj: object | string): obj is PayloadInfo => {
if (typeof obj === "string") return false;
return "username" in obj && "permission" in obj && Array.isArray((obj as { permission: unknown }).permission);
};
type RefreshPayloadInfo = { username: string };
const isRefreshToken = (obj: object | string): obj is RefreshPayloadInfo => {
if (typeof obj === "string") return false;
return "username" in obj && typeof (obj as { username: unknown }).username === "string";
};
const accessExpiredTime = 60 * 60 * 2; // 2 hours
async function createAccessToken(payload: PayloadInfo, secret: string) {
return await new SignJWT(payload)
.setProtectedHeader({ alg: "HS256" })
.setExpirationTime("2h")
.sign(new TextEncoder().encode(secret));
}
const refreshExpiredTime = 60 * 60 * 24 * 14; // 14 days
async function createRefreshToken(payload: RefreshPayloadInfo, secret: string) {
return await new SignJWT(payload)
.setProtectedHeader({ alg: "HS256" })
.setExpirationTime("14d")
.sign(new TextEncoder().encode(secret));
}
class TokenExpiredError extends Error {
constructor() {
super("Token expired");
}
}
async function verifyToken(token: string, secret: string) {
try {
const { payload } = await jwtVerify(token, new TextEncoder().encode(secret));
return payload as PayloadInfo;
} catch (error) {
if (error instanceof errors.JWTExpired) {
throw new TokenExpiredError();
}
throw new Error("Invalid token");
}
}
export const accessTokenName = "access_token";
export const refreshTokenName = "refresh_token";
const accessExpiredTime = 60 * 60; // 1 hour
const refreshExpiredTime = 60 * 60 * 24 * 14; // 14 day;
export const getAdminAccessTokenValue = () => {
const { jwt_secretkey } = get_setting();
return publishAccessToken(jwt_secretkey, "admin", [], accessExpiredTime);
};
export const getAdminRefreshTokenValue = () => {
const { jwt_secretkey } = get_setting();
return publishRefreshToken(jwt_secretkey, "admin", refreshExpiredTime);
};
const publishAccessToken = (secretKey: string, username: string, permission: string[], expiredtime: number) => {
const payload = sign(
{
username: username,
permission: permission,
},
secretKey,
{ expiresIn: expiredtime },
);
return payload;
};
const publishRefreshToken = (secretKey: string, username: string, expiredtime: number) => {
const payload = sign({ username: username }, secretKey, { expiresIn: expiredtime });
return payload;
};
function setToken(ctx: Koa.Context, token_name: string, token_payload: string | null, expiredtime: number) {
const setting = get_setting();
if (token_payload === null && !ctx.cookies.get(token_name)) {
@ -64,20 +75,15 @@ function setToken(ctx: Koa.Context, token_name: string, token_payload: string |
expires: new Date(Date.now() + expiredtime * 1000),
});
}
export const createLoginMiddleware = (userController: UserAccessor) => async (ctx: Koa.Context, _next: Koa.Next) => {
const setting = get_setting();
const secretKey = setting.jwt_secretkey;
const body = ctx.request.body;
// check format
if (typeof body === "string" || !("username" in body) || !("password" in body)) {
return sendError(400, "invalid form : username or password is not found in query.");
}
const username = body.username;
const password = body.password;
// check type
if (typeof username !== "string" || typeof password !== "string") {
return sendError(400, "invalid form : username or password is not string");
}
const {
username,
password,
} = LoginRequestSchema.parse(body);
// reject the login if remote admin login is forbidden
if (username === "admin" && setting.forbid_remote_admin_login) {
return sendError(403, "forbidden remote admin login");
@ -91,8 +97,13 @@ export const createLoginMiddleware = (userController: UserAccessor) => async (ct
}
// create token
const userPermission = await user.get_permissions();
const payload = publishAccessToken(secretKey, user.username, userPermission, accessExpiredTime);
const payload2 = publishRefreshToken(secretKey, user.username, refreshExpiredTime);
const payload = await createAccessToken({
username: user.username,
permission: userPermission,
}, secretKey);
const payload2 = await createRefreshToken({
username: user.username,
}, secretKey);
setToken(ctx, accessTokenName, payload, accessExpiredTime);
setToken(ctx, refreshTokenName, payload2, refreshExpiredTime);
ctx.body = {
@ -115,6 +126,7 @@ export const LogoutMiddleware = (ctx: Koa.Context, next: Koa.Next) => {
};
return;
};
export const createUserMiddleWare =
(userController: UserAccessor) => async (ctx: Koa.ParameterizedContext<UserState>, next: Koa.Next) => {
const refreshToken = refreshTokenHandler(userController);
@ -127,50 +139,63 @@ export const createUserMiddleWare =
};
return await refreshToken(ctx, setGuest, next);
};
const refreshTokenHandler = (cntr: UserAccessor) => async (ctx: Koa.Context, fail: Koa.Next, next: Koa.Next) => {
const accessPayload = ctx.cookies.get(accessTokenName);
const setting = get_setting();
const secretKey = setting.jwt_secretkey;
if (accessPayload === undefined) {
if (!accessPayload) {
return await checkRefreshAndUpdate();
}
try {
const o = verify(accessPayload, secretKey);
if (isUserState(o)) {
ctx.state.user = o;
const payload = await verifyToken(accessPayload, secretKey);
if (isUserState(payload)) {
ctx.state.user = payload;
return await next();
}
console.error("invalid token detected");
throw new Error("token form invalid");
} catch (e) {
if (e instanceof TokenExpiredError) {
console.error("Invalid token detected");
throw new Error("Token form invalid");
} catch (error) {
if (error instanceof TokenExpiredError) {
return await checkRefreshAndUpdate();
}throw e;
}
throw error;
}
async function checkRefreshAndUpdate() {
const refreshPayload = ctx.cookies.get(refreshTokenName);
if (refreshPayload === undefined) {
return await fail(); // refresh token doesn't exist
if (!refreshPayload) {
return await fail(); // Refresh token doesn't exist
}
try {
const o = verify(refreshPayload, secretKey);
if (isRefreshToken(o)) {
const user = await cntr.findUser(o.username);
if (user === undefined) return await fail(); // already non-existence user
const perm = await user.get_permissions();
const payload = publishAccessToken(secretKey, user.username, perm, accessExpiredTime);
setToken(ctx, accessTokenName, payload, accessExpiredTime);
ctx.state.user = { username: o.username, permission: perm };
const payload = await verifyToken(refreshPayload, secretKey);
if (isRefreshToken(payload)) {
const user = await cntr.findUser(payload.username);
if (!user) return await fail(); // User does not exist
const permissions = await user.get_permissions();
const newAccessToken = await createAccessToken({
username: user.username,
permission: permissions,
}, secretKey);
setToken(ctx, accessTokenName, newAccessToken, accessExpiredTime);
ctx.state.user = { username: payload.username, permission: permissions };
} else {
console.error("invalid token detected");
throw new Error("token form invalid");
console.error("Invalid token detected");
throw new Error("Token form invalid");
}
} catch (e) {
if (e instanceof TokenExpiredError) {
// refresh token is expired.
} catch (error) {
if (error instanceof TokenExpiredError) {
// Refresh token is expired
return await fail();
}throw e;
}
throw error;
}
return await next();
}
};
@ -195,25 +220,24 @@ export const createRefreshTokenMiddleware = (cntr: UserAccessor) => async (ctx:
ctx.type = "json";
}
};
export const resetPasswordMiddleware = (cntr: UserAccessor) => async (ctx: Koa.Context, next: Koa.Next) => {
const body = ctx.request.body;
if (typeof body !== "object" || !("username" in body) || !("oldpassword" in body) || !("newpassword" in body)) {
return sendError(400, "request body is invalid format");
}
const username = body.username;
const oldpw = body.oldpassword;
const newpw = body.newpassword;
if (typeof username !== "string" || typeof oldpw !== "string" || typeof newpw !== "string") {
return sendError(400, "request body is invalid format");
}
const {
username,
oldpassword,
newpassword,
} = LoginResetRequestSchema.parse(body);
const user = await cntr.findUser(username);
if (user === undefined) {
return sendError(403, "not authorized");
}
if (!user.password.check_password(oldpw)) {
if (!user.password.check_password(oldpassword)) {
return sendError(403, "not authorized");
}
user.reset_password(newpw);
user.reset_password(newpassword);
ctx.body = { ok: true };
ctx.type = "json";
};
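
For reference, the jose primitives used above reduce to a short sign and verify round trip. A minimal, self-contained sketch with a made-up secret (the real middleware derives the key from get_setting().jwt_secretkey):

import { SignJWT, jwtVerify, errors } from "jose";

const secret = new TextEncoder().encode("example-secret"); // hypothetical key material
const token = await new SignJWT({ username: "admin", permission: [] })
    .setProtectedHeader({ alg: "HS256" })
    .setExpirationTime("2h")
    .sign(secret);

try {
    const { payload } = await jwtVerify(token, secret);
    console.log(payload.username); // "admin"
} catch (err) {
    // expired tokens surface as errors.JWTExpired, which login.ts maps to its own TokenExpiredError
    if (err instanceof errors.JWTExpired) console.error("token expired");
    else throw err;
}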

View File

@ -1,9 +1,16 @@
import { check_type } from "../util/type_check";
import { check_type } from "../util/type_check.ts";
import type {
DocumentBody,
QueryListOption,
Document,
QueryListOption
} from "dbtype/api";
db
} from "dbtype";
type DBDocument = db.Document;
export type {
Document,
DBDocument
};
export const MetaContentBody = {
title: "string",

View File

@ -1,3 +1,3 @@
export * from "./doc";
export * from "./tag";
export * from "./user";
export * from "./doc.ts";
export * from "./tag.ts";
export * from "./user.ts";

View File

@ -1,6 +1,7 @@
import { createHmac, randomBytes } from "node:crypto";
function hashForPassword(salt: string, password: string) {
// TODO: use pbkdf2 or argon2
return createHmac("sha256", salt).update(password).digest("hex");
}
function createPasswordHashAndSalt(password: string): { salt: string; hash: string } {

View File

@ -1,9 +1,9 @@
import type { DefaultContext, Middleware, Next, ParameterizedContext } from "koa";
import compose from "koa-compose";
import Router from "koa-router";
import ComicRouter from "./comic";
import type { ContentContext } from "./context";
import VideoRouter from "./video";
import ComicRouter from "./comic.ts";
import type { ContentContext } from "./context.ts";
import VideoRouter from "./video.ts";
const table: { [s: string]: Router | undefined } = {
comic: new ComicRouter(),

View File

@ -1,8 +1,8 @@
import type { Context } from "koa";
import Router from "koa-router";
import { createReadableStreamFromZip, entriesByNaturalOrder, readZip } from "../util/zipwrap";
import type { ContentContext } from "./context";
import { since_last_modified } from "./util";
import { createReadableStreamFromZip, entriesByNaturalOrder, readZip } from "../util/zipwrap.ts";
import type { ContentContext } from "./context.ts";
import { since_last_modified } from "./util.ts";
import type { ZipReader } from "@zip.js/zip.js";
import type { FileHandle } from "node:fs/promises";
import { Readable } from "node:stream";

View File

@ -4,17 +4,17 @@ import { join } from "node:path";
import type {
Document,
QueryListOption,
} from "dbtype/api";
import type { DocumentAccessor } from "../model/doc";
} from "dbtype";
import type { DocumentAccessor } from "../model/doc.ts";
import {
AdminOnlyMiddleware as AdminOnly,
createPermissionCheckMiddleware as PerCheck,
Permission as Per,
} from "../permission/permission";
import { AllContentRouter } from "./all";
import type { ContentLocation } from "./context";
import { sendError } from "./error_handler";
import { ParseQueryArgString, ParseQueryArray, ParseQueryBoolean, ParseQueryNumber } from "./util";
} from "../permission/permission.ts";
import { AllContentRouter } from "./all.ts";
import type { ContentLocation } from "./context.ts";
import { sendError } from "./error_handler.ts";
import { ParseQueryArgString, ParseQueryArray, ParseQueryBoolean, ParseQueryNumber } from "./util.ts";
const ContentIDHandler = (controller: DocumentAccessor) => async (ctx: Context, next: Next) => {
const num = Number.parseInt(ctx.params.num);
@ -35,14 +35,21 @@ const ContentTagIDHandler = (controller: DocumentAccessor) => async (ctx: Contex
ctx.type = "json";
};
const ContentQueryHandler = (controller: DocumentAccessor) => async (ctx: Context, next: Next) => {
const query_limit = ctx.query.limit;
const query_cursor = ctx.query.cursor;
const query_word = ctx.query.word;
const query_content_type = ctx.query.content_type;
const query_offset = ctx.query.offset;
const query_use_offset = ctx.query.use_offset;
if (Array.isArray(query_limit) ||Array.isArray(query_cursor) ||Array.isArray(query_word) ||Array.isArray(query_content_type) ||Array.isArray(query_offset) ||Array.isArray(query_use_offset)
) {
if ([
query_limit,
query_cursor,
query_word,
query_content_type,
query_offset,
query_use_offset,
].some((x) => Array.isArray(x))) {
return sendError(400, "paramter can not be array");
}
const limit = Math.min(ParseQueryNumber(query_limit) ?? 20, 100);
@ -65,7 +72,7 @@ const ContentQueryHandler = (controller: DocumentAccessor) => async (ctx: Contex
cursor: cursor,
eager_loading: true,
offset: offset,
use_offset: use_offset,
use_offset: use_offset ?? false,
content_type: content_type,
};
const document = await controller.findList(option);
@ -105,6 +112,7 @@ const AddTagHandler = (controller: DocumentAccessor) => async (ctx: Context, nex
ctx.body = JSON.stringify(r);
ctx.type = "json";
};
const DelTagHandler = (controller: DocumentAccessor) => async (ctx: Context, next: Next) => {
let tag_name = ctx.params.tag;
const num = Number.parseInt(ctx.params.num);
@ -120,12 +128,14 @@ const DelTagHandler = (controller: DocumentAccessor) => async (ctx: Context, nex
ctx.body = JSON.stringify(r);
ctx.type = "json";
};
const DeleteContentHandler = (controller: DocumentAccessor) => async (ctx: Context, next: Next) => {
const num = Number.parseInt(ctx.params.num);
const r = await controller.del(num);
ctx.body = JSON.stringify(r);
ctx.type = "json";
};
const ContentHandler = (controller: DocumentAccessor) => async (ctx: Context, next: Next) => {
const num = Number.parseInt(ctx.params.num);
const document = await controller.findById(num, true);

View File

@ -1,3 +1,4 @@
import { ZodError } from "dbtype";
import type { Context, Next } from "koa";
export interface ErrorFormat {
@ -36,7 +37,17 @@ export const error_handler = async (ctx: Context, next: Next) => {
};
ctx.status = err.code;
ctx.body = body;
} else {
}
else if (err instanceof ZodError) {
const body: ErrorFormat = {
code: 400,
message: "BadRequest",
detail: err.errors.map((x) => x.message).join(", "),
};
ctx.status = 400;
ctx.body = body;
}
else {
throw err;
}
}

View File

@ -1,8 +1,8 @@
import { type Context, Next } from "koa";
import Router, { type RouterContext } from "koa-router";
import type { TagAccessor } from "../model/tag";
import { createPermissionCheckMiddleware as PerCheck, Permission } from "../permission/permission";
import { sendError } from "./error_handler";
import type { TagAccessor } from "../model/tag.ts";
import { createPermissionCheckMiddleware as PerCheck, Permission } from "../permission/permission.ts";
import { sendError } from "./error_handler.ts";
export function getTagRounter(tagController: TagAccessor) {
const router = new Router();

View File

@ -1,7 +1,7 @@
import { createReadStream, promises } from "node:fs";
import type { Context } from "koa";
import Router from "koa-router";
import type { ContentContext } from "./context";
import type { ContentContext } from "./context.ts";
export async function renderVideo(ctx: Context, path: string) {
const ext = path.trim().split(".").pop();

View File

@ -1,21 +1,21 @@
import Koa from "koa";
import Router from "koa-router";
import { connectDB } from "./database";
import { createDiffRouter, DiffManager } from "./diff/mod";
import { get_setting, SettingConfig } from "./SettingConfig";
import { connectDB } from "./database.ts";
import { createDiffRouter, DiffManager } from "./diff/mod.ts";
import { get_setting, SettingConfig } from "./SettingConfig.ts";
import { createReadStream, readFileSync } from "node:fs";
import bodyparser from "koa-bodyparser";
import { createSqliteDocumentAccessor, createSqliteTagController, createSqliteUserController } from "./db/mod";
import { createLoginRouter, createUserMiddleWare, getAdmin, isAdminFirst } from "./login";
import getContentRouter from "./route/contents";
import { error_handler } from "./route/error_handler";
import { createSqliteDocumentAccessor, createSqliteTagController, createSqliteUserController } from "./db/mod.ts";
import { createLoginRouter, createUserMiddleWare, getAdmin, isAdminFirst } from "./login.ts";
import getContentRouter from "./route/contents.ts";
import { error_handler } from "./route/error_handler.ts";
import { createInterface as createReadlineInterface } from "node:readline";
import { createComicWatcher } from "./diff/watcher/comic_watcher";
import type { DocumentAccessor, TagAccessor, UserAccessor } from "./model/mod";
import { getTagRounter } from "./route/tags";
import { createComicWatcher } from "./diff/watcher/comic_watcher.ts";
import type { DocumentAccessor, TagAccessor, UserAccessor } from "./model/mod.ts";
import { getTagRounter } from "./route/tags.ts";
import { config } from "dotenv";
import { extname, join } from "node:path";
@ -150,7 +150,7 @@ class ServerApplication {
meta = createOgTagContent(
doc.title,
doc.tags.join(", "),
`https://aeolian.prelude.duckdns.org/api/doc/${docId}/comic/thumbnail`,
`https://aeolian.monoid.top/api/doc/${docId}/comic/thumbnail`,
);
}
const html = makeMetaTagInjectedHTML(this.index_html, meta);

View File

@ -0,0 +1,44 @@
import { open } from "fs/promises";
/**
 * Get the hash of a file.
 *
 * Implements the hashing method described at
 * [https://pypi.org/project/oshash/](https://pypi.org/project/oshash/).
 * It is useful when you don't want to read an entire file's bytes to
 * generate a hash, provided you trust that any change to the file will
 * show up in its first or last bytes, or in its file size.
 */
export async function oshash(
    path: string,
){
    const chunkSize = 4096;
    const minFileSize = chunkSize * 2;
    const fd = await open(path, "r");
    try {
        const st = await fd.stat();
        let hash = BigInt(st.size);
        if (st.size < minFileSize){
            throw new Error("File is too small to hash");
        }
        // read first and last chunk
        const firstChunk = Buffer.alloc(chunkSize);
        await fd.read(firstChunk, 0, chunkSize, 0);
        const lastChunk = Buffer.alloc(chunkSize);
        await fd.read(lastChunk, 0, chunkSize, st.size - chunkSize);
        // iterate over the first and last chunk;
        // for each uint64, add it to the hash.
        for (let i = 0; i < chunkSize; i += 8){
            hash += firstChunk.readBigUInt64LE(i);
            // emulate uint64 overflow by masking to 64 bits
            hash = (hash & 0xFFFFFFFFFFFFFFFFn);
        }
        for (let i = 0; i < chunkSize; i += 8){
            hash += lastChunk.readBigUInt64LE(i);
            // emulate uint64 overflow by masking to 64 bits
            hash = (hash & 0xFFFFFFFFFFFFFFFFn);
        }
        return hash;
    } finally {
        // always release the file handle, even when hashing fails
        await fd.close();
    }
}
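
A brief usage sketch for the helper (the file path is hypothetical); the result is a bigint that callers can format as a fixed-width hex string if needed:

import { oshash } from "./oshash.ts"; // assumed relative location of this new module

// the file must be at least 8 KiB (two 4096-byte chunks), otherwise oshash throws
const hash = await oshash("./downloads/example.zip");
console.log(hash.toString(16).padStart(16, "0"));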

View File

@ -5,7 +5,7 @@
/* Basic Options */
// "incremental": true, /* Enable incremental compilation */
"target": "esnext", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
"module": "NodeNext", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
"lib": ["DOM", "ESNext"],
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
@ -14,15 +14,16 @@
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
// "sourceMap": true, /* Generates corresponding '.map' file. */
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./build", /* Redirect output structure to the directory. */
"outDir": "./dist", /* Redirect output structure to the directory. */
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "composite": true, /* Enable project compilation */
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
"noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
"allowImportingTsExtensions": true,
/* Strict Type-Checking Options */
"strict": true, /* Enable all strict type-checking options. */
@ -41,9 +42,12 @@
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
/* Module Resolution Options */
"moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
"moduleResolution": "NodeNext", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
"baseUrl": ".", /* Base directory to resolve non-absolute module names. */
/* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "paths": {
// "dbtype/*": ["node_modules/dbtype/*"],
// },
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [], /* List of folders to include type definitions from. */
// "types": [], /* Type declaration files to be included in compilation. */
@ -67,6 +71,6 @@
"skipLibCheck": true, /* Skip type checking of declaration files. */
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
},
"include": ["./"],
"exclude": ["src/client", "app", "seeds"]
"include": ["src"],
"exclude": ["app", "seeds", "node_modules"]
}

File diff suppressed because it is too large