import { MultiReader } from "../io/readers.ts";import { PartialReadError } from "../io/bufio.ts";import { assert } from "../_util/assert.ts";
// Local aliases for Deno's built-in stream interfaces.
type Reader = Deno.Reader;
type Seeker = Deno.Seeker;
// A tar archive is a sequence of 512-byte records.
const recordSize = 512;
// Magic + version bytes of a ustar header ("ustar\0" followed by "00").
const ustar = "ustar\u000000";
// Sum of 8 ASCII spaces (8 * 0x20): the value the 8-byte checksum field
// contributes while a header's checksum is computed. An all-zero block
// (archive trailer) therefore sums to exactly this value.
const initialChecksum = 8 * 32;
/**
 * Fills `p` completely from `reader`, looping over short reads.
 *
 * Returns the number of bytes read (always `p.length` on success), or
 * `null` when the reader is already at EOF before the first byte.
 * Throws `PartialReadError` if EOF arrives mid-block.
 */
async function readBlock(
  reader: Deno.Reader,
  p: Uint8Array,
): Promise<number | null> {
  let filled = 0;
  while (filled < p.length) {
    const result = await reader.read(p.subarray(filled));
    if (result !== null) {
      filled += result;
      continue;
    }
    // EOF: before the first byte it signals "no more blocks";
    // in the middle of a block it is a corrupt/truncated input.
    if (filled === 0) {
      return null;
    }
    throw new PartialReadError();
  }
  return filled;
}
/**
 * Reader that lazily opens `filePath` on the first read() call and
 * closes the handle again once EOF is reached.
 */
class FileReader implements Reader {
  private file?: Deno.File;

  constructor(private filePath: string) {}

  public async read(p: Uint8Array): Promise<number | null> {
    if (this.file === undefined) {
      this.file = await Deno.open(this.filePath, { read: true });
    }
    const bytesRead = await Deno.read(this.file.rid, p);
    if (bytesRead !== null) {
      return bytesRead;
    }
    // EOF: release the handle; a later read() would reopen from the start.
    Deno.close(this.file.rid);
    this.file = undefined;
    return null;
  }
}
/**
 * Returns the portion of `buffer` before the first NUL byte, or the
 * whole buffer when it contains no NUL (ustar strings are NUL-padded).
 */
function trim(buffer: Uint8Array): Uint8Array {
  const nul = buffer.indexOf(0);
  return nul === -1 ? buffer : buffer.subarray(0, nul);
}
/**
 * Allocates a zero-filled buffer of `length` bytes.
 *
 * The previous explicit `fill(0, 0, length - 1)` skipped the final byte —
 * harmless only because `Uint8Array` is already zero-initialized by the
 * runtime, so the call was both redundant and misleading; it is removed.
 */
function clean(length: number): Uint8Array {
  return new Uint8Array(length);
}
/**
 * Renders `num` in `base` (octal by default) and left-pads it with
 * zeros so the result is `bytes` characters wide.
 */
function pad(num: number, bytes: number, base?: number): string {
  const zeros = "000000000000";
  const rendered = num.toString(base || 8);
  return zeros.substr(rendered.length + zeros.length - bytes) + rendered;
}
// ustar type-flag values ('0'..'7' in the header's `type` field).
// Kept as a numeric enum: the reverse (number -> name) mapping is relied
// on when decoding headers.
enum FileTypes {
  "file" = 0,
  "link" = 1,
  "symlink" = 2,
  "character-device" = 3,
  "block-device" = 4,
  "directory" = 5,
  "fifo" = 6,
  "contiguous-file" = 7,
}
// Field layout of a 512-byte ustar header, in on-disk order.
// The lengths sum to 512; numeric fields are stored as octal strings.
const ustarStructure: Array<{ field: string; length: number }> = [
  { field: "fileName", length: 100, },
  { field: "fileMode", length: 8, },
  { field: "uid", length: 8, },
  { field: "gid", length: 8, },
  { field: "fileSize", length: 12, },
  { field: "mtime", length: 12, },
  { field: "checksum", length: 8, },
  { field: "type", length: 1, },
  { field: "linkName", length: 100, },
  { field: "ustar", length: 8, },
  { field: "owner", length: 32, },
  { field: "group", length: 32, },
  { field: "majorNumber", length: 8, },
  { field: "minorNumber", length: 8, },
  { field: "fileNamePrefix", length: 155, },
  { field: "padding", length: 12, },
];
/**
 * Serializes `data` into a 512-byte ustar header block: each field is
 * UTF-8 encoded at its fixed offset, with the remainder left as NULs.
 */
function formatHeader(data: TarData): Uint8Array {
  const encoder = new TextEncoder();
  const buffer = clean(512);
  let offset = 0;
  for (const { field, length } of ustarStructure) {
    const encoded = encoder.encode(data[field as keyof TarData] || "");
    buffer.set(encoded, offset);
    offset += length;
  }
  return buffer;
}
/**
 * Splits a raw 512-byte header block into its named ustar fields.
 * The returned views alias `buffer` — no bytes are copied.
 */
function parseHeader(buffer: Uint8Array): { [key: string]: Uint8Array } {
  const data: { [key: string]: Uint8Array } = {};
  let offset = 0;
  for (const { field, length } of ustarStructure) {
    data[field] = buffer.subarray(offset, offset + length);
    offset += length;
  }
  return data;
}
// Raw header: each ustar field name mapped to its byte view in the block.
interface TarHeader {
  [key: string]: Uint8Array;
}
/**
 * Header field values for one archive entry, already string-encoded
 * (numeric fields are zero-padded octal strings produced by `pad`).
 */
export interface TarData {
  fileName?: string;
  fileNamePrefix?: string;
  fileMode?: string;
  uid?: string;
  gid?: string;
  fileSize?: string;
  mtime?: string;
  checksum?: string;
  type?: string;
  ustar?: string;
  owner?: string;
  group?: string;
}
/**
 * TarData plus the content source it will be read from: either a file
 * path (opened lazily via FileReader in getReader) or an explicit reader.
 */
export interface TarDataWithSource extends TarData {
  filePath?: string;
  reader?: Reader;
}
/** Entry metadata shared between appending (Tar) and extracting (Untar). */
export interface TarInfo {
  fileMode?: number;
  mtime?: number;
  uid?: number;
  gid?: number;
  owner?: string;
  group?: string;
  type?: string;
}
/** Options accepted by Tar.append(). */
export interface TarOptions extends TarInfo {
  // Path of the file to add; its stat() supplies defaults for
  // size, mode and mtime.
  filePath?: string;

  // Content source used instead of reading from filePath.
  reader?: Reader;

  // Byte length of reader's content; required when no filePath is given.
  contentSize?: number;
}
/** Decoded metadata exposed for an extracted entry. */
export interface TarMeta extends TarInfo {
  fileName: string;
  fileSize?: number;
}
// Declaration-merged with the TarEntry class below: the TarMeta fields
// are copied onto each instance via Object.assign in the constructor,
// and this interface makes them visible on the class's type.
interface TarEntry extends TarMeta {}
/**
 * Builder for a ustar tar archive: queue entries with append(), then
 * stream the whole archive through the Reader returned by getReader().
 */
export class Tar {
  // Entries queued so far, in append order.
  data: TarDataWithSource[];

  constructor() {
    this.data = [];
  }

  /**
   * Queues a file (from opts.filePath) or an in-memory reader
   * (opts.reader + opts.contentSize) for inclusion under the name `fn`.
   *
   * Names longer than 100 bytes are split at a "/" into the ustar
   * fileNamePrefix (max 155 bytes) and fileName (max 100 bytes) fields;
   * throws if no such split exists.
   */
  async append(fn: string, opts: TarOptions): Promise<void> {
    if (typeof fn !== "string") {
      throw new Error("file name not specified");
    }
    let fileName = fn;
    // Split long names into [fileNamePrefix, "/", fileName].
    let fileNamePrefix: string | undefined;
    if (fileName.length > 100) {
      let i = fileName.length;
      while (i >= 0) {
        // Search backwards for a "/" that leaves a prefix <= 155 bytes.
        i = fileName.lastIndexOf("/", i);
        if (i <= 155) {
          fileNamePrefix = fileName.substr(0, i);
          fileName = fileName.substr(i + 1);
          break;
        }
        i--;
      }
      const errMsg =
        "ustar format does not allow a long file name (length of [file name" +
        "prefix] + / + [file name] must be shorter than 256 bytes)";
      if (i < 0 || fileName.length > 100) {
        throw new Error(errMsg);
      } else {
        assert(fileNamePrefix != null);
        if (fileNamePrefix.length > 155) {
          throw new Error(errMsg);
        }
      }
    }

    opts = opts || {};

    // Stat the source file (if any) to supply size/mode/mtime defaults;
    // directories get an empty content reader and zero size.
    let info: Deno.FileInfo | undefined;
    if (opts.filePath) {
      info = await Deno.stat(opts.filePath);
      if (info.isDirectory) {
        info.size = 0;
        opts.reader = new Deno.Buffer();
      }
    }

    const mode = opts.fileMode || (info && info.mode) ||
        parseInt("777", 8) & 0xfff,
      // mtime is stored in seconds; Date.valueOf() is milliseconds.
      mtime = Math.floor(
        opts.mtime ?? (info?.mtime ?? new Date()).valueOf() / 1000,
      ),
      uid = opts.uid || 0,
      gid = opts.gid || 0;
    if (typeof opts.owner === "string" && opts.owner.length >= 32) {
      throw new Error(
        "ustar format does not allow owner name length >= 32 bytes",
      );
    }
    if (typeof opts.group === "string" && opts.group.length >= 32) {
      throw new Error(
        "ustar format does not allow group name length >= 32 bytes",
      );
    }

    const fileSize = info?.size ?? opts.contentSize;
    assert(fileSize != null, "fileSize must be set");

    const type = opts.type
      ? FileTypes[opts.type as keyof typeof FileTypes]
      : (info?.isDirectory ? FileTypes.directory : FileTypes.file);
    const tarData: TarDataWithSource = {
      fileName,
      fileNamePrefix,
      fileMode: pad(mode, 7),
      uid: pad(uid, 7),
      gid: pad(gid, 7),
      fileSize: pad(fileSize, 11),
      mtime: pad(mtime, 11),
      // Placeholder: 8 spaces, matching initialChecksum used when the
      // checksum is later verified during extraction.
      checksum: "        ",
      type: type.toString(),
      ustar,
      owner: opts.owner || "",
      group: opts.group || "",
      filePath: opts.filePath,
      reader: opts.reader,
    };

    // Compute the header checksum: byte sum of every encoded header
    // field (with the checksum field still holding spaces).
    let checksum = 0;
    const encoder = new TextEncoder();
    Object.keys(tarData)
      .filter((key): boolean => ["filePath", "reader"].indexOf(key) < 0)
      .forEach(function (key): void {
        checksum += encoder
          .encode(tarData[key as keyof TarData])
          .reduce((p, c): number => p + c, 0);
      });

    tarData.checksum = pad(checksum, 6) + "\u0000 ";
    this.data.push(tarData);
  }

  /**
   * Returns a Reader that streams the archive: for each entry a header
   * block, its content, and zero padding to the next record boundary,
   * followed by the two empty trailer records.
   */
  getReader(): Reader {
    const readers: Reader[] = [];
    this.data.forEach((tarData): void => {
      let { reader } = tarData;
      const { filePath } = tarData;
      const headerArr = formatHeader(tarData);
      readers.push(new Deno.Buffer(headerArr));
      if (!reader) {
        assert(filePath != null);
        reader = new FileReader(filePath);
      }
      readers.push(reader);

      // Pad the content up to the next 512-byte record boundary
      // (no padding when the size is already a multiple of 512).
      assert(tarData.fileSize != null, "fileSize must be set");
      readers.push(
        new Deno.Buffer(
          clean(
            recordSize -
              (parseInt(tarData.fileSize, 8) % recordSize || recordSize),
          ),
        ),
      );
    });

    // Archive trailer: two zero-filled records mark the end.
    readers.push(new Deno.Buffer(clean(recordSize * 2)));
    return new MultiReader(...readers);
  }
}
/**
 * A single entry of a tar archive being extracted.
 *
 * read() surfaces only the entry's real content; the zero padding that
 * rounds the entry up to a multiple of 512 bytes is consumed from the
 * underlying reader but never returned to the caller.
 */
class TarEntry implements Reader {
  #header: TarHeader;
  #reader: Reader | (Reader & Deno.Seeker);
  // Content size in bytes, taken from the decoded header.
  #size: number;
  // Bytes consumed from the archive so far (content + padding).
  #read = 0;
  #consumed = false;
  // Content size rounded up to a whole number of 512-byte records.
  #entrySize: number;

  constructor(
    meta: TarMeta,
    header: TarHeader,
    reader: Reader | (Reader & Deno.Seeker),
  ) {
    // Expose the metadata fields on the instance (see the merged
    // `interface TarEntry extends TarMeta` declaration).
    Object.assign(this, meta);
    this.#header = header;
    this.#reader = reader;

    this.#size = this.fileSize || 0;
    const blocks = Math.ceil(this.#size / recordSize);
    this.#entrySize = blocks * recordSize;
  }

  /** True once the entry (content and padding) is fully drained. */
  get consumed(): boolean {
    return this.#consumed;
  }

  async read(p: Uint8Array): Promise<number | null> {
    // Bytes left for the entry, padding included.
    const entryBytesLeft = this.#entrySize - this.#read;
    const bufSize = Math.min(
      p.length,
      entryBytesLeft,
    );

    if (entryBytesLeft <= 0) {
      // BUGFIX: mark the entry consumed here, so Untar.extract() does
      // not attempt to discard() an already-finished entry.
      this.#consumed = true;
      return null;
    }

    const block = new Uint8Array(bufSize);
    const n = await readBlock(this.#reader, block);
    const bytesLeft = this.#size - this.#read;

    this.#read += n || 0;
    if (n === null || bytesLeft <= 0) {
      // BUGFIX: was `if (null) this.#consumed = true;` — dead code that
      // never marked EOF of the underlying reader as consumed.
      if (n === null) this.#consumed = true;
      return null;
    }

    // Strip the zero padding: only `bytesLeft` of the bytes just read
    // are real content.
    const offset = bytesLeft < n ? bytesLeft : n;
    p.set(block.subarray(0, offset), 0);

    return offset < 0 ? n - Math.abs(offset) : offset;
  }

  /** Skips the rest of this entry so the next header can be read. */
  async discard(): Promise<void> {
    // Discard current entry
    if (this.#consumed) return;
    this.#consumed = true;

    if (typeof (this.#reader as Seeker).seek === "function") {
      // Seekable source: jump straight past the remaining entry bytes.
      await (this.#reader as Seeker).seek(
        this.#entrySize - this.#read,
        Deno.SeekMode.Current,
      );
      this.#read = this.#entrySize;
    } else {
      // Non-seekable: drain the remainder through read().
      await Deno.readAll(this);
    }
  }
}
/**
 * Extracts entries from a ustar tar archive supplied by `reader`.
 * Use extract() to advance entry by entry, or iterate asynchronously.
 */
export class Untar {
  reader: Reader;
  // Scratch buffer holding the current 512-byte header block.
  block: Uint8Array;
  #entry: TarEntry | undefined;

  constructor(reader: Reader) {
    this.reader = reader;
    this.block = new Uint8Array(recordSize);
  }

  // Sums all 512 header bytes, with the 8 checksum bytes (offsets
  // 148-155) counted as ASCII spaces via initialChecksum.
  #checksum = (header: Uint8Array): number => {
    let sum = initialChecksum;
    for (let i = 0; i < 512; i++) {
      if (i >= 148 && i < 156) {
        // Ignore the checksum field itself.
        continue;
      }
      sum += header[i];
    }
    return sum;
  };

  // Reads and validates the next header block.
  // Returns null at the archive trailer (an all-zero block).
  #getHeader = async (): Promise<TarHeader | null> => {
    await readBlock(this.reader, this.block);
    const header = parseHeader(this.block);

    const decoder = new TextDecoder();
    const checksum = this.#checksum(this.block);

    if (parseInt(decoder.decode(header.checksum), 8) !== checksum) {
      // An all-zero block sums to exactly initialChecksum: end of archive.
      if (checksum === initialChecksum) {
        return null;
      }
      throw new Error("checksum error");
    }

    const magic = decoder.decode(header.ustar);

    // indexOf returns 0 (falsy) when the magic starts with "ustar".
    if (magic.indexOf("ustar")) {
      throw new Error(`unsupported archive format: ${magic}`);
    }

    return header;
  };

  // Decodes the raw header bytes into typed entry metadata.
  #getMetadata = (header: TarHeader): TarMeta => {
    const decoder = new TextDecoder();
    const meta: TarMeta = {
      fileName: decoder.decode(trim(header.fileName)),
    };
    // Rejoin a split long name: prefix + "/" + name.
    const fileNamePrefix = trim(header.fileNamePrefix);
    if (fileNamePrefix.byteLength > 0) {
      meta.fileName = decoder.decode(fileNamePrefix) + "/" + meta.fileName;
    }
    // Numeric fields are stored as octal strings.
    (["fileMode", "mtime", "uid", "gid"] as [
      "fileMode",
      "mtime",
      "uid",
      "gid",
    ]).forEach((key): void => {
      const arr = trim(header[key]);
      if (arr.byteLength > 0) {
        meta[key] = parseInt(decoder.decode(arr), 8);
      }
    });
    (["owner", "group", "type"] as ["owner", "group", "type"]).forEach(
      (key): void => {
        const arr = trim(header[key]);
        if (arr.byteLength > 0) {
          meta[key] = decoder.decode(arr);
        }
      },
    );

    meta.fileSize = parseInt(decoder.decode(header.fileSize), 8);
    // Map the numeric type flag back to its name via the enum's
    // reverse mapping; keep the raw value if it is not a known flag.
    meta.type = FileTypes[parseInt(meta.type!)] ?? meta.type;

    return meta;
  };

  /**
   * Advances to the next entry, discarding any unread remainder of the
   * previous one. Returns null once the archive trailer is reached.
   */
  async extract(): Promise<TarEntry | null> {
    if (this.#entry && !this.#entry.consumed) {
      // The previous entry body was not (fully) read: skip past it so
      // the reader is positioned at the next header.
      await this.#entry.discard();
    }

    const header = await this.#getHeader();
    if (header === null) return null;

    const meta = this.#getMetadata(header);

    this.#entry = new TarEntry(meta, header, this.reader);

    return this.#entry;
  }

  async *[Symbol.asyncIterator](): AsyncIterableIterator<TarEntry> {
    while (true) {
      const entry = await this.extract();

      if (entry === null) return;

      yield entry;
    }
  }
}