fix bug on ECONNRESET

monoid 2021-01-25 21:48:00 +09:00
parent 98a5f52dab
commit 2b5839826b
3 changed files with 100 additions and 48 deletions


@@ -1,5 +1,5 @@
 import {ContentFile, createDefaultClass,registerContentReferrer, ContentConstructOption} from './file';
-import {readZip,createReadStreamFromZip, readAllFromZip} from '../util/zipwrap';
+import {readZip, readAllFromZip} from '../util/zipwrap';
 import { DocumentBody } from '../model/doc';
 import {extname} from 'path';


@@ -1,50 +1,101 @@
-import {Context, DefaultContext, DefaultState, Next} from 'koa';
-import {readZip,entriesByNaturalOrder,createReadStreamFromZip} from '../util/zipwrap';
-import {since_last_modified} from './util';
-import {ContentContext} from './context';
-import Router from 'koa-router';
+import { Context, DefaultContext, DefaultState, Next } from "koa";
+import {
+    createReadableStreamFromZip,
+    entriesByNaturalOrder,
+    readZip,
+} from "../util/zipwrap";
+import { since_last_modified } from "./util";
+import { ContentContext } from "./context";
+import Router from "koa-router";
+import StreamZip from "node-stream-zip";
 /**
  * zip stream cache.
  */
-async function renderZipImage(ctx: Context,path : string, page:number){
-    const image_ext = ['gif', 'png', 'jpeg', 'bmp', 'webp', 'jpg'];
-    let zip = await readZip(path);
-    const entries = entriesByNaturalOrder(zip).filter(x=>{
-        const ext = x.name.split('.').pop();
-        return ext !== undefined && image_ext.includes(ext);
-    });
-    if(0 <= page && page < entries.length){
-        const entry = entries[page];
-        const last_modified = new Date(entry.time);
-        if(since_last_modified(ctx,last_modified)){
+let ZipStreamCache: { [path: string]: [StreamZip, number] } = {};
+async function acquireZip(path: string) {
+    if (ZipStreamCache[path] === undefined) {
+        const ret = await readZip(path);
+        if (ZipStreamCache[path] === undefined) {
+            ZipStreamCache[path] = [ret, 1];
+            console.log(`acquire ${path} 1`);
+            return ret;
+        }
+        ret.close();
+    }
+    const [ret, refCount] = ZipStreamCache[path];
+    ZipStreamCache[path] = [ret, refCount + 1];
+    console.log(`acquire ${path} ${refCount + 1}`);
+    return ret;
+}
+function releaseZip(path: string) {
+    const obj = ZipStreamCache[path];
+    if (obj === undefined) throw new Error("error! key invalid");
+    const [ref, refCount] = obj;
+    console.log(`release ${path} : ${refCount}`);
+    if (refCount === 1) {
+        ref.close();
+        delete ZipStreamCache[path];
+        return;
+    }
+    ZipStreamCache[path] = [ref, refCount - 1];
+}
+async function renderZipImage(ctx: Context, path: string, page: number) {
+    const image_ext = ["gif", "png", "jpeg", "bmp", "webp", "jpg"];
+    console.log(`opened ${page}`);
+    let zip = await acquireZip(path);
+    const entries = entriesByNaturalOrder(zip).filter((x) => {
+        const ext = x.name.split(".").pop();
+        return ext !== undefined && image_ext.includes(ext);
+    });
+    if (0 <= page && page < entries.length) {
+        const entry = entries[page];
+        const last_modified = new Date(entry.time);
+        if (since_last_modified(ctx, last_modified)) {
             return;
         }
-        const read_stream = (await createReadStreamFromZip(zip,entry));
-        read_stream.on('close',()=>zip.close());
+        const read_stream = (await createReadableStreamFromZip(zip, entry));
+        /** Exceptions (ECONNRESET, ECONNABORTED) may be thrown while processing this request,
+         * for example when the browser unexpectedly closes the connection.
+         * Once such an exception is raised, the stream is not properly destroyed,
+         * so the zlib stream may still be accessed after the stream is closed.
+         * So we wait 100 ms before releasing it.
+         * Additionally, there is a risk of a memory leak because the zlib stream
+         * is not properly destroyed.
+         * @todo modify function 'stream' in 'node-stream-zip' library to prevent memory leak */
+        read_stream.once("close", () => {
+            setTimeout(() => {
+                releaseZip(path);
+            }, 100);
+        });
         ctx.body = read_stream;
         ctx.response.length = entry.size;
         //console.log(`${entry.name}'s ${page}:${entry.size}`);
         ctx.response.type = entry.name.split(".").pop() as string;
         ctx.status = 200;
-        ctx.set('Date', new Date().toUTCString());
-        ctx.set("Last-Modified",last_modified.toUTCString());
-    }
-    else{
+        ctx.set("Date", new Date().toUTCString());
+        ctx.set("Last-Modified", last_modified.toUTCString());
+    } else {
         ctx.status = 404;
     }
 }
-export class MangaRouter extends Router<ContentContext>{
-    constructor(){
+export class MangaRouter extends Router<ContentContext> {
+    constructor() {
         super();
-        this.get("/",async (ctx,next)=>{
-            await renderZipImage(ctx,ctx.state.location.path,0);
+        this.get("/", async (ctx, next) => {
+            await renderZipImage(ctx, ctx.state.location.path, 0);
         });
-        this.get("/:page(\\d+)",async (ctx,next)=>{
-            const page = Number.parseInt(ctx.params['page']);
-            await renderZipImage(ctx,ctx.state.location.path,page);
+        this.get("/:page(\\d+)", async (ctx, next) => {
+            const page = Number.parseInt(ctx.params["page"]);
+            await renderZipImage(ctx, ctx.state.location.path, page);
         });
-        this.get("/thumbnail", async (ctx,next)=>{
-            await renderZipImage(ctx,ctx.state.location.path,0);
+        this.get("/thumbnail", async (ctx, next) => {
+            await renderZipImage(ctx, ctx.state.location.path, 0);
         });
     }
 }
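
The core of the fix is the reference-counted handle cache plus the delayed release. Below is a self-contained sketch of that pattern; the generic `Handle` type and the `acquire`/`release`/`open` names are illustrative, not part of the commit:

```ts
// Sketch: reference-counted handle cache, as in acquireZip/releaseZip above.
type Handle = { close(): void };

const cache: { [key: string]: [Handle, number] } = {};

async function acquire(key: string, open: () => Promise<Handle>): Promise<Handle> {
    if (cache[key] === undefined) {
        const handle = await open();
        // Re-check after the await: a concurrent caller may have
        // populated the cache while open() was pending.
        if (cache[key] === undefined) {
            cache[key] = [handle, 1];
            return handle;
        }
        handle.close(); // lost the race; discard the duplicate handle
    }
    const [handle, refCount] = cache[key];
    cache[key] = [handle, refCount + 1];
    return handle;
}

function release(key: string) {
    const entry = cache[key];
    if (entry === undefined) throw new Error(`unknown key: ${key}`);
    const [handle, refCount] = entry;
    if (refCount === 1) {
        handle.close(); // last reference: actually close the resource
        delete cache[key];
    } else {
        cache[key] = [handle, refCount - 1];
    }
}
```

The inner re-check in `acquire` guards the race where two requests open the same zip concurrently. On the request path above, `releaseZip` is deferred by 100 ms from the stream's close event so that a late ECONNRESET cannot reach an already-closed zlib stream.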


@@ -1,5 +1,6 @@
 import StreamZip, { ZipEntry } from 'node-stream-zip';
 import {orderBy} from 'natural-orderby';
+import { ReadStream } from 'fs';
 export async function readZip(path : string):Promise<StreamZip>{
     return new Promise((resolve,reject)=>{
@@ -22,7 +23,7 @@ export function entriesByNaturalOrder(zip: StreamZip){
     const ret = orderBy(Object.values(entries),v=>v.name);
     return ret;
 }
-export async function createReadStreamFromZip(zip:StreamZip,entry: ZipEntry):Promise<NodeJS.ReadableStream>{
+export async function createReadableStreamFromZip(zip:StreamZip,entry: ZipEntry):Promise<NodeJS.ReadableStream>{
     return new Promise((resolve,reject)=>{
         zip.stream(entry,(err, stream)=>{
             if(stream !== undefined){
@@ -35,7 +36,7 @@ export async function createReadStreamFromZip(zip:StreamZip,entry: ZipEntry):Pro
     );
 }
 export async function readAllFromZip(zip:StreamZip,entry: ZipEntry):Promise<Buffer>{
-    const stream = await createReadStreamFromZip(zip,entry);
+    const stream = await createReadableStreamFromZip(zip,entry);
     const chunks:Uint8Array[] = [];
     return new Promise((resolve,reject)=>{
         stream.on('data',(data)=>{chunks.push(data)});
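
The hunk above cuts off before `readAllFromZip` finishes. For reference, a minimal sketch of how such a chunk-collecting promise is typically completed; the `'end'`/`'error'` handlers are an assumption about the function's tail, not the file's verbatim remainder:

```ts
import { Readable } from "stream";

// Sketch: drain a readable stream into one Buffer, the same pattern
// readAllFromZip uses above (the 'end'/'error' handlers are assumed).
function readAll(stream: Readable): Promise<Buffer> {
    const chunks: Uint8Array[] = [];
    return new Promise((resolve, reject) => {
        stream.on("data", (data) => chunks.push(data));
        stream.on("end", () => resolve(Buffer.concat(chunks)));
        stream.on("error", (err) => reject(err));
    });
}
```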