Get rid of jszip and replace it with a custom library
parent 36a69313cd
commit 438b9d4730
9 changed files with 377 additions and 102 deletions
@@ -6,12 +6,37 @@ import { decryptFile } from "../DecryptFile";
import { mediaFromContent } from "../../customisations/Media";
import { formatFullDateNoDay } from "../../DateUtils";

type FileStream = {
    name: string,
    stream(): ReadableStream,
};

export default abstract class Exporter {
    protected files: FileStream[];
    protected constructor(
        protected room: Room,
        protected exportType: exportTypes,
        protected exportOptions?: exportOptions,
    ) {}
    ) {
        this.files = [];
    }

    protected addFile = (filePath: string, blob: Blob) => {
        const file = {
            name: filePath,
            stream: () => blob.stream(),
        }
        this.files.push(file);
    }

    protected pumpToFileStream = async (reader: ReadableStreamDefaultReader, writer: WritableStreamDefaultWriter) => {
        const res = await reader.read();
        if (res.done) await writer.close();
        else {
            await writer.write(res.value);
            await this.pumpToFileStream(reader, writer)
        }
    }

    protected setEventMetadata = (event: MatrixEvent) => {
        const client = MatrixClientPeg.get();
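The recursive pumpToFileStream helper above drains a ReadableStream reader into a WritableStream writer one chunk at a time. The same pattern as a standalone sketch (the helper names here are illustrative, not part of the commit):

// Standalone sketch of the pump pattern used by pumpToFileStream():
// read chunks from a ReadableStream reader and forward them to a WritableStream writer.
async function pump(reader: ReadableStreamDefaultReader, writer: WritableStreamDefaultWriter): Promise<void> {
    const { done, value } = await reader.read();
    if (done) {
        await writer.close();
        return;
    }
    await writer.write(value);
    await pump(reader, writer); // recurse until the source stream is drained
}

// Example usage: copy a Blob's contents into any writable stream.
async function copyBlob(blob: Blob, writable: WritableStream): Promise<void> {
    await pump(blob.stream().getReader(), writable.getWriter());
}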
@@ -1,6 +1,5 @@
import React from "react"
import streamSaver from "streamsaver";
import JSZip from "jszip";
import Exporter from "./Exporter";
import { mediaFromMxc } from "../../customisations/Media";
import { Room } from "matrix-js-sdk/src/models/room";

@@ -25,9 +24,9 @@ import { exportTypes } from "./exportUtils";
import { exportOptions } from "./exportUtils";
import MatrixClientContext from "../../contexts/MatrixClientContext";
import { MatrixClient } from "matrix-js-sdk";
import zip from "./StreamToZip";

export default class HTMLExporter extends Exporter {
    protected zip: JSZip;
    protected avatars: Map<string, boolean>;
    protected permalinkCreator: RoomPermalinkCreator;
    protected matrixClient: MatrixClient;

@@ -36,7 +35,6 @@ export default class HTMLExporter extends Exporter {

    constructor(room: Room, exportType: exportTypes, exportOptions: exportOptions) {
        super(room, exportType, exportOptions);
        this.zip = new JSZip();
        this.avatars = new Map<string, boolean>();
        this.matrixClient = MatrixClientPeg.get();
        this.permalinkCreator = new RoomPermalinkCreator(this.room);

@@ -59,7 +57,7 @@ export default class HTMLExporter extends Exporter {
        if (avatarUrl) {
            const image = await fetch(avatarUrl);
            blob = await image.blob();
            this.zip.file(avatarPath, blob);
            this.addFile(avatarPath, blob);
        }
        const avatar = (
            <BaseAvatar

@@ -217,7 +215,7 @@ export default class HTMLExporter extends Exporter {
            this.avatars.set(member.userId, true);
            const image = await fetch(avatarUrl);
            const blob = await image.blob();
            this.zip.file(`users/${member.userId.replace(/:/g, '-')}`, blob);
            this.addFile(`users/${member.userId.replace(/:/g, '-')}`, blob);
        }
    }

@@ -285,7 +283,7 @@ export default class HTMLExporter extends Exporter {
            if (this.totalSize > this.exportOptions.maxSize - 1024 * 1024) {
                this.exportOptions.attachmentsIncluded = false;
            }
            this.zip.file(filePath, blob);
            this.addFile(filePath, blob);
        } else {
            const modifiedContent = {
                msgtype: "m.text",

@@ -334,53 +332,43 @@ export default class HTMLExporter extends Exporter {

        const html = await this.createHTML(res);

        this.zip.file("index.html", html);
        this.zip.file("css/style.css", exportCSS);
        this.zip.file("js/script.js", exportJS);
        this.addFile("index.html", new Blob([html]));
        this.addFile("css/style.css", new Blob([exportCSS]));
        this.addFile("js/script.js", new Blob([exportJS]));


        for (const iconName in exportIcons) {
            this.zip.file(`icons/${iconName}`, exportIcons[iconName]);
            this.addFile(`icons/${iconName}`, new Blob([exportIcons[iconName]]));
        }

        const filename = `matrix-export-${formatFullDateNoDay(new Date())}.zip`;

        console.info("HTML creation successful!");
        console.info("Generating a ZIP...");
        //Generate the zip file asynchronously
        const blob = await this.zip.generateAsync({ type: "blob" });

        console.log("ZIP generated successfully");
        console.info("Writing to file system...")
        //Support for firefox browser
        streamSaver.WritableStream = ponyfill.WritableStream
        //Create a writable stream to the directory
        const fileStream = streamSaver.createWriteStream(filename, { size: blob.size });
        const fileStream = streamSaver.createWriteStream(filename);

        const writer = fileStream.getWriter();
        const files = this.files;

        // Here we chunk the blob into pieces of 10 MB, the size might be dynamically generated.
        // This can be used to keep track of the progress
        const sliceSize = 10 * 1e6;
        for (let fPointer = 0; fPointer < blob.size; fPointer += sliceSize) {
            const blobPiece = blob.slice(fPointer, fPointer + sliceSize);
            const reader = new FileReader();
        console.info("Generating a ZIP...");
        const readableZipStream = zip({
            start(ctrl) {
                for (const file of files) ctrl.enqueue(file);
                ctrl.close();
            },
        });

        console.info("Writing to file system...")

        const reader = readableZipStream.getReader()
        await this.pumpToFileStream(reader, writer);

            const waiter = new Promise<void>((resolve) => {
                reader.onloadend = evt => {
                    const arrayBufferNew: any = evt.target.result;
                    const uint8ArrayNew = new Uint8Array(arrayBufferNew);
                    writer.write(uint8ArrayNew);
                    resolve();
                };
                reader.readAsArrayBuffer(blobPiece);
            });
            await waiter;
        }
        await writer.close();
        const exportEnd = performance.now();
        console.info(`Export Successful! Exported ${res.length} events in ${(exportEnd - fetchStart)/1000} seconds`);
        window.removeEventListener("beforeunload", this.onBeforeUnload);
        return blob;
    }
}
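Taken together, the new export path in HTMLExporter is: queue name/stream pairs with addFile, wrap them in the zip() readable stream from ./StreamToZip, and pump the resulting ZIP bytes into a streamSaver writer. A condensed sketch of the same flow outside the class, with illustrative file names and contents (a plain read loop stands in for pumpToFileStream):

import streamSaver from "streamsaver";
import zip from "./StreamToZip";

async function saveZip() {
    // Illustrative entries mirroring what addFile() queues: a name plus a lazy stream().
    const files = [
        { name: "index.html", stream: () => new Blob(["<html></html>"]).stream() },
        { name: "css/style.css", stream: () => new Blob(["body {}"]).stream() },
    ];

    // zip() returns a ReadableStream of ZIP bytes built from the enqueued file-like objects.
    const readableZipStream = zip({
        start(ctrl) {
            for (const file of files) ctrl.enqueue(file);
            ctrl.close();
        },
    });

    // Pump the ZIP bytes straight to disk through streamSaver.
    const writer = streamSaver.createWriteStream("export.zip").getWriter();
    const reader = readableZipStream.getReader();
    for (;;) {
        const { done, value } = await reader.read();
        if (done) break;
        await writer.write(value);
    }
    await writer.close();
}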
@@ -5,18 +5,23 @@ import { MatrixEvent } from "matrix-js-sdk/src/models/event";
import { formatFullDateNoDay } from "../../DateUtils";
import { _t } from "../../languageHandler";
import * as ponyfill from "web-streams-polyfill/ponyfill"
import "web-streams-polyfill/ponyfill" // to support blob.stream()
import { haveTileForEvent } from "../../components/views/rooms/EventTile";
import { exportTypes } from "./exportUtils";
import { exportOptions } from "./exportUtils";
import { textForEvent } from "../../TextForEvent";
import zip from "./StreamToZip";


export default class PlainTextExporter extends Exporter {
    protected totalSize: number;
    protected mediaOmitText: string;
    private readonly fileDir: string;

    constructor(room: Room, exportType: exportTypes, exportOptions: exportOptions) {
        super(room, exportType, exportOptions);
        this.totalSize = 0;
        this.fileDir = `matrix-export-${formatFullDateNoDay(new Date())}`;
        this.mediaOmitText = !this.exportOptions.attachmentsIncluded
            ? _t("Media omitted")
            : _t("Media omitted - file size limit exceeded");

@@ -61,8 +66,17 @@ export default class PlainTextExporter extends Exporter {
        return `<${rplName}${rplSource}> ${rplText}`;
    }

    protected _textForEvent = (mxEv: MatrixEvent) => {
    protected _textForEvent = async (mxEv: MatrixEvent) => {
        const senderDisplayName = mxEv.sender && mxEv.sender.name ? mxEv.sender.name : mxEv.getSender();
        if (this.exportOptions.attachmentsIncluded && this.isAttachment(mxEv)) {
            const blob = await this.getMediaBlob(mxEv);
            this.totalSize += blob.size;
            const filePath = this.getFilePath(mxEv);
            this.addFile(filePath, blob);
            if (this.totalSize > this.exportOptions.maxSize - 1024 * 1024) {
                this.exportOptions.attachmentsIncluded = false;
            }
        }
        if (this.isReply(mxEv)) return senderDisplayName + ": " + this.textForReplyEvent(mxEv);
        else return textForEvent(mxEv);
    }

@@ -71,12 +85,18 @@ export default class PlainTextExporter extends Exporter {
        let content = "";
        for (const event of events) {
            if (!haveTileForEvent(event)) continue;
            const textForEvent = this._textForEvent(event);
            const textForEvent = await this._textForEvent(event);
            content += textForEvent && `${new Date(event.getTs()).toLocaleString()} - ${textForEvent}\n`;
        }
        return content;
    }

    protected getFileName = () => {
        if (this.exportOptions.attachmentsIncluded) {
            return `${this.room.name}.txt`;
        } else return `${this.fileDir}.txt`
    }

    public async export() {
        console.info("Starting export process...");
        console.info("Fetching events...");

@@ -89,17 +109,30 @@ export default class PlainTextExporter extends Exporter {

        const text = await this.createOutput(res);

        const filename = `matrix-export-${formatFullDateNoDay(new Date())}.txt`;

        console.info("Writing to a file...");
        //Support for firefox browser
        console.info("Writing to the file system...");
        streamSaver.WritableStream = ponyfill.WritableStream
        //Create a writable stream to the directory
        const fileStream = streamSaver.createWriteStream(filename);
        const writer = fileStream.getWriter();
        const data = new TextEncoder().encode(text);
        await writer.write(data);
        await writer.close();

        const files = this.files;
        if (files.length) {
            this.addFile(this.getFileName(), new Blob([text]));
            const fileStream = streamSaver.createWriteStream(`${this.fileDir}.zip`);
            const readableZipStream = zip({
                start(ctrl) {
                    for (const file of files) ctrl.enqueue(file);
                    ctrl.close();
                },
            });
            const writer = fileStream.getWriter()
            const reader = readableZipStream.getReader()
            await this.pumpToFileStream(reader, writer);
        } else {
            const fileStream = streamSaver.createWriteStream(`${this.fileDir}.txt`);
            const writer = fileStream.getWriter()
            const data = new TextEncoder().encode(text);
            await writer.write(data);
            await writer.close();
        }

        const exportEnd = performance.now();
        console.info(`Export Successful! Exported ${res.length} events in ${(exportEnd - fetchStart)/1000} seconds`);
        window.removeEventListener("beforeunload", this.onBeforeUnload);
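When nothing was queued by addFile, the export above reduces to encoding the transcript and streaming it to disk. A minimal standalone sketch of that branch (function name and call are illustrative), assuming only the streamsaver dependency:

import streamSaver from "streamsaver";

// Encode a string and stream it to a downloaded file, mirroring the plain .txt branch above.
async function saveText(filename: string, text: string): Promise<void> {
    const fileStream = streamSaver.createWriteStream(filename);
    const writer = fileStream.getWriter();
    await writer.write(new TextEncoder().encode(text));
    await writer.close();
}

// e.g. await saveText("transcript.txt", "Hello from the export");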
@@ -1,17 +1,278 @@
/*Not to be reviewed now*/
// class fileCheckSum {
//     protected CRC32: number;
//     public table: any[];
//     constructor() {
//         this.CRC32 = -1
//     }
/* global ReadableStream */

//     protected append(data: any[]) {
//         let crc = this.CRC32 | 0;
//         const table = this.table;
//         for (let offset = 0, len = data.length | 0; offset < len; offset++) {
//             crc = (crc >>> 8) ^ table[(crc ^ data[offset]) & 0xFF]
//         }
//         this.CRC32 = crc
//     }
// }
type TypedArray =
    | Int8Array
    | Uint8Array
    | Int16Array
    | Uint16Array
    | Int32Array
    | Uint32Array
    | Uint8ClampedArray
    | Float32Array
    | Float64Array;


/**
 * 32-bit cyclic redundancy check, or CRC-32 - checksum
 */
class Crc32 {
    crc: number;
    table: any;
    constructor() {
        this.crc = -1;
        this.table = (() => {
            let i
            let j
            let t;
            const table = [];

            for (i = 0; i < 256; i++) {
                t = i;
                for (j = 0; j < 8; j++) {
                    t = (t & 1)
                        ? (t >>> 1) ^ 0xEDB88320
                        : t >>> 1;
                }
                table[i] = t;
            }
            return table
        })()
    }

    append(data: TypedArray) {
        let crc = this.crc | 0;
        const table = this.table;
        for (let offset = 0, len = data.length | 0; offset < len; offset++) {
            crc = (crc >>> 8) ^ table[(crc ^ data[offset]) & 0xFF];
        }
        this.crc = crc;
    }

    get() {
        return ~this.crc;
    }
}


type DataHelper = {
    array: Uint8Array,
    view: DataView,
}

const getDataHelper = (byteLength: number): DataHelper => {
    const uint8 = new Uint8Array(byteLength)
    return {
        array: uint8,
        view: new DataView(uint8.buffer),
    };
}

type FileLike = File & {
    directory: string,
    comment: string,
    stream(): ReadableStream,
}

type ZipObj = {
    crc?: Crc32,
    uncompressedLength: number,
    compressedLength: number,
    ctrl: ReadableStreamDefaultController,
    writeFooter: Function,
    writeHeader: Function,
    reader?: ReadableStreamDefaultReader,
    offset: number
    header?: DataHelper,
    fileLike: FileLike,
    level: number,
    directory: boolean,
}

const pump = (zipObj: ZipObj) => zipObj.reader ? zipObj.reader.read().then(chunk => {
    if (zipObj.crc) {
        if (chunk.done) return zipObj.writeFooter();
        const outputData = chunk.value;
        zipObj.crc.append(outputData);
        zipObj.uncompressedLength += outputData.length;
        zipObj.compressedLength += outputData.length;
        zipObj.ctrl.enqueue(outputData);
    } else {
        throw new Error('Missing zipObj.crc');
    }
}) : undefined;

export default function ZIP(underlyingSource: UnderlyingSource) {
    const files = Object.create(null);
    const filenames: string[] = [];
    const encoder = new TextEncoder();
    let offset = 0;
    let activeZipIndex = 0;
    let ctrl: ReadableStreamDefaultController;
    let activeZipObject: ZipObj;
    let closed: boolean;

    function next() {
        activeZipIndex++;
        activeZipObject = files[filenames[activeZipIndex]];
        if (activeZipObject) processNextChunk();
        else if (closed) closeZip();
    }

    const zipWriter: ReadableStreamDefaultController = {
        desiredSize: null,

        error(err) {
            console.error(err)
        },

        enqueue(fileLike: FileLike) {
            if (closed) {
                throw new TypeError(
                    "Cannot enqueue a chunk into a readable stream that is closed or has been requested to be closed",
                );
            }

            let name = fileLike.name.trim();
            const date = new Date(typeof fileLike.lastModified === 'undefined' ? Date.now() : fileLike.lastModified);

            if (fileLike.directory && !name.endsWith('/')) name += '/';
            if (files[name]) throw new Error('File already exists.');

            const nameBuf = encoder.encode(name);
            filenames.push(name);

            const zipObject: ZipObj = files[name] = {
                level: 0,
                ctrl,
                directory: !!fileLike.directory,
                nameBuf,
                comment: encoder.encode(fileLike.comment || ''),
                compressedLength: 0,
                uncompressedLength: 0,
                offset,

                writeHeader() {
                    const header = getDataHelper(26)
                    const data = getDataHelper(30 + nameBuf.length)

                    zipObject.offset = offset;
                    zipObject.header = header;

                    if (zipObject.level !== 0 && !zipObject.directory) {
                        header.view.setUint16(4, 0x0800);
                    }

                    header.view.setUint32(0, 0x14000808);
                    header.view.setUint16(
                        6,
                        (((date.getHours() << 6) | date.getMinutes()) << 5) | (date.getSeconds() / 2),
                        true,
                    );
                    header.view.setUint16(
                        8,
                        ((((date.getFullYear() - 1980) << 4) | (date.getMonth() + 1)) << 5) |
                        date.getDate(),
                        true,
                    );
                    header.view.setUint16(22, nameBuf.length, true);
                    data.view.setUint32(0, 0x504b0304);
                    data.array.set(header.array, 4);
                    data.array.set(nameBuf, 30);
                    offset += data.array.length;
                    ctrl.enqueue(data.array);
                },

                writeFooter() {
                    const footer = getDataHelper(16);
                    footer.view.setUint32(0, 0x504b0708);

                    if (zipObject.crc && zipObject.header) {
                        zipObject.header.view.setUint32(10, zipObject.crc.get(), true);
                        zipObject.header.view.setUint32(14, zipObject.compressedLength, true);
                        zipObject.header.view.setUint32(18, zipObject.uncompressedLength, true);
                        footer.view.setUint32(4, zipObject.crc.get(), true);
                        footer.view.setUint32(8, zipObject.compressedLength, true);
                        footer.view.setUint32(12, zipObject.uncompressedLength, true);
                    }

                    ctrl.enqueue(footer.array);
                    offset += zipObject.compressedLength + 16;
                    next()
                },
                fileLike,
            }

            if (!activeZipObject) {
                activeZipObject = zipObject;
                processNextChunk();
            }
        },

        close() {
            if (closed) {
                throw new TypeError(
                    "Cannot close a readable stream that has already been requested to be closed",
                );
            }
            if (!activeZipObject) closeZip();
            closed = true;
        },
    }

    function closeZip() {
        let length = 0;
        let index = 0
        let indexFilename;
        let file;

        for (indexFilename = 0; indexFilename < filenames.length; indexFilename++) {
            file = files[filenames[indexFilename]];
            length += 46 + file.nameBuf.length + file.comment.length;
        }
        const data = getDataHelper(length + 22)
        for (indexFilename = 0; indexFilename < filenames.length; indexFilename++) {
            file = files[filenames[indexFilename]];
            data.view.setUint32(index, 0x504b0102);
            data.view.setUint16(index + 4, 0x1400);
            data.array.set(file.header.array, index + 6);
            data.view.setUint16(index + 32, file.comment.length, true);
            if (file.directory) {
                data.view.setUint8(index + 38, 0x10);
            }
            data.view.setUint32(index + 42, file.offset, true);
            data.array.set(file.nameBuf, index + 46);
            data.array.set(file.comment, index + 46 + file.nameBuf.length);
            index += 46 + file.nameBuf.length + file.comment.length;
        }
        data.view.setUint32(index, 0x504b0506);
        data.view.setUint16(index + 8, filenames.length, true);
        data.view.setUint16(index + 10, filenames.length, true);
        data.view.setUint32(index + 12, length, true);
        data.view.setUint32(index + 16, offset, true);
        ctrl.enqueue(data.array);
        ctrl.close();
    }

    function processNextChunk() {
        if (!activeZipObject) return;
        if (activeZipObject.reader) return pump(activeZipObject);
        if (activeZipObject.fileLike.stream) {
            activeZipObject.crc = new Crc32();
            activeZipObject.reader = activeZipObject.fileLike.stream().getReader();
            activeZipObject.writeHeader();
        } else next();
    }

    return new ReadableStream({
        start: c => {
            ctrl = c;
            underlyingSource.start && Promise.resolve(underlyingSource.start(zipWriter));
        },
        pull() {
            return processNextChunk() || (
                underlyingSource.pull &&
                Promise.resolve(underlyingSource.pull(zipWriter))
            )
        },
    });
}
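The magic numbers written by writeHeader, writeFooter, and closeZip above are the standard ZIP record signatures, the ASCII bytes "PK" followed by a record type. Spelled out as named constants for reference (the constant names are illustrative):

// ZIP record signatures, written big-endian by the setUint32 calls above.
const LOCAL_FILE_HEADER_SIG = 0x504b0304;        // "PK\x03\x04": precedes each file's data (writeHeader)
const DATA_DESCRIPTOR_SIG = 0x504b0708;          // "PK\x07\x08": CRC-32 and sizes written after the data (writeFooter)
const CENTRAL_DIRECTORY_SIG = 0x504b0102;        // "PK\x01\x02": one directory entry per file (closeZip)
const END_OF_CENTRAL_DIRECTORY_SIG = 0x504b0506; // "PK\x05\x06": archive trailer with entry counts and offsets (closeZip)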