// NOTE(review): stray extraction artifact — a PHP deprecation message, not JavaScript;
// preserved verbatim below, commented out so it cannot be parsed as code:
// : str_replace(): Passing null to parameter #2 ($replace) of type array|string is deprecated in
// Validates a CSS font-family value, warning on malformed input.
// NOTE(review): this chunk is truncated — the function's return statements and
// closing braces are missing from the visible source.
function validateFontName(fontFamily, mustWarn = false) {
// If the value is wrapped in quotes, both quote characters must be the same kind.
const m = /^("|').*("|')$/.exec(fontFamily);
if (m && m[1] === m[2]) {
// Inside a quoted value, any same-kind quote must be backslash-escaped.
const re = new RegExp(`[^\\\\]${m[1]}`);
if (re.test(fontFamily.slice(1, -1))) {
warn(`FontFamily contains unescaped ${m[1]}: ${fontFamily}.`);
// Unquoted case (presumably the else-branch; braces not visible here): each
// whitespace-separated token must be a valid CSS <custom-ident> — it must not
// start with a digit, "-<digit>" or "--", and may only contain \w, '-' and '\'.
for (const ident of fontFamily.split(/[ \t]+/)) {
if (/^(\d|(-(\d|-)))/.test(ident) || !/^[\w-\\]+$/.test(ident)) {
warn(`FontFamily contains invalid <custom-ident>: ${fontFamily}.`);
// Normalizes a cssFontInfo object in place: validates fontFamily, clamps
// fontWeight to a recognized CSS value and italicAngle to [-90, 90].
// NOTE(review): truncated — the destructuring that introduces fontFamily,
// fontWeight and italicAngle, plus the returns/closers, are not visible here.
function validateCSSFont(cssFontInfo) {
// Default oblique angle (degrees, as a string) used when italicAngle is invalid.
const DEFAULT_CSS_FONT_OBLIQUE = "14";
// Default weight used when fontWeight is not one of the recognized values.
const DEFAULT_CSS_FONT_WEIGHT = "400";
const CSS_FONT_WEIGHT_VALUES = new Set(["100", "200", "300", "400", "500", "600", "700", "800", "900", "1000", "normal", "bold", "bolder", "lighter"]);
if (!validateFontName(fontFamily, true)) {
// Coerce weight to string; fall back to the default when unrecognized.
const weight = fontWeight ? fontWeight.toString() : "";
cssFontInfo.fontWeight = CSS_FONT_WEIGHT_VALUES.has(weight) ? weight : DEFAULT_CSS_FONT_WEIGHT;
// Reject NaN and out-of-range angles, replacing them with the default oblique angle.
const angle = parseFloat(italicAngle);
cssFontInfo.italicAngle = isNaN(angle) || angle < -90 || angle > 90 ? DEFAULT_CSS_FONT_OBLIQUE : italicAngle.toString();
// Attempts to recover a URL from a JavaScript action string such as
// app.launchURL("http://...", true) or window.open("...").
// NOTE(review): truncated — the null-check on the match and the return value
// are missing from this chunk.
function recoverJsURL(str) {
const URL_OPEN_METHODS = ["app.launchURL", "window.open", "xfa.host.gotoURL"];
// Case-insensitive match of: <method>('url') / <method>("url"[, flag]); the
// dots in the method names are escaped so they match literally.
const regex = new RegExp("^\\s*(" + URL_OPEN_METHODS.join("|").replaceAll(".", "\\.") + ")\\((?:'|\")([^'\"]*)(?:'|\")(?:,\\s*(\\w+)\\)|\\))", "i");
const jsUrl = regex.exec(str);
// Capture groups: [1] method name, [2] URL, [3] optional second argument
// (e.g. the newWindow flag of app.launchURL).
if (jsUrl[3] === "true" && jsUrl[1] === "app.launchURL") {
// Formats a number as a string with at most two decimal places.
// NOTE(review): truncated — the integer fast-path return, the one-decimal
// return and the final two-decimal return are missing from this chunk.
function numberToString(value) {
if (Number.isInteger(value)) {
// Round to hundredths once, then decide how many decimals are needed.
const roundedValue = Math.round(value * 100);
if (roundedValue % 100 === 0) {
// Exact integer after rounding — no decimal point needed.
return (roundedValue / 100).toString();
if (roundedValue % 10 === 0) {
// Groups editor-created annotations from annotationStorage by page index.
// Returns a Map<pageIndex, annotations> or null when there is nothing to add.
// NOTE(review): truncated — the early return for a falsy annotationStorage,
// the `continue` for non-editor keys, and the array initialization/push for
// `annotations` are missing from this chunk.
function getNewAnnotationsMap(annotationStorage) {
if (!annotationStorage) {
const newAnnotationsByPage = new Map();
// annotationStorage is iterable as [key, value] pairs; only keys carrying the
// editor prefix are (presumably) of interest — the skip branch is not visible.
for (const [key, value] of annotationStorage) {
if (!key.startsWith(AnnotationEditorPrefix)) {
let annotations = newAnnotationsByPage.get(value.pageIndex);
newAnnotationsByPage.set(value.pageIndex, annotations);
return newAnnotationsByPage.size > 0 ? newAnnotationsByPage : null;
// NOTE(review): orphaned fragment — tests that `str` contains only 7-bit ASCII
// characters; the enclosing function header (likely an isAscii(str) helper) is
// missing from this chunk.
return /^[\x00-\x7F]*$/.test(str);
// Encodes a string as big-endian UTF-16 expressed in lowercase hex pairs.
// NOTE(review): truncated — the declaration of `buf` and the final join/return
// are missing from this chunk.
function stringToUTF16HexString(str) {
for (let i = 0, ii = str.length; i < ii; i++) {
const char = str.charCodeAt(i);
// High byte first, then low byte; each zero-padded to two hex digits.
buf.push((char >> 8 & 0xff).toString(16).padStart(2, "0"), (char & 0xff).toString(16).padStart(2, "0"));
// Converts a string to a UTF-16 byte string (one character per byte).
// NOTE(review): truncated — the declaration of `buf`, the BOM handling implied
// by `bigEndian`, and the final join/return are missing from this chunk.
function stringToUTF16String(str, bigEndian = false) {
for (let i = 0, ii = str.length; i < ii; i++) {
const char = str.charCodeAt(i);
// Emit high byte then low byte of each UTF-16 code unit.
buf.push(String.fromCharCode(char >> 8 & 0xff), String.fromCharCode(char & 0xff));
// Returns a 2x3 affine transform matrix [a, b, c, d, e, f] for a page rotation.
// NOTE(review): truncated — the switch/case labels selecting between the three
// returns (presumably rotations of 90, 180 and 270 degrees) are missing here.
function getRotationMatrix(rotation, width, height) {
return [0, 1, -1, 0, width, 0];
return [-1, 0, 0, -1, width, height];
return [0, -1, 1, 0, 0, height];
// Any rotation not handled above is rejected.
throw new Error("Invalid rotation");
// Returns the number of whole bytes needed to represent the non-negative
// value x (bits = ceil(log2(1 + x)), then rounded up to full bytes).
// NOTE(review): the closing brace is missing from this chunk.
function getSizeInBytes(x) {
return Math.ceil(Math.ceil(Math.log2(1 + x)) / 8);
;// CONCATENATED MODULE: ./src/core/stream.js
// In-memory byte stream over a Uint8Array window [start, end).
// NOTE(review): heavily truncated — method/getter headers (length, isEmpty,
// getByte, getBytes, …), assignments of this.start/this.pos, and closing
// braces are missing from this chunk; comments below mark the fragments.
class Stream extends BaseStream {
constructor(arrayBuffer, start, length, dict) {
// Accept either a Uint8Array directly or any buffer-like source.
this.bytes = arrayBuffer instanceof Uint8Array ? arrayBuffer : new Uint8Array(arrayBuffer);
// `start + length || this.bytes.length`: falls back to the full buffer when
// start/length are not supplied (both undefined makes the sum NaN → falsy).
this.end = start + length || this.bytes.length;
// fragment: presumably the `length` getter.
return this.end - this.start;
// fragment: presumably the `isEmpty` getter.
return this.length === 0;
// fragment: presumably getByte() — returns -1 (not visible) at end of data.
if (this.pos >= this.end) {
return this.bytes[this.pos++];
// fragment: presumably getBytes(length) — bounds computation not visible.
const bytes = this.bytes;
return bytes.subarray(pos, strEnd);
return bytes.subarray(pos, end);
// Returns a view (not a copy) of the underlying bytes in [begin, end).
getByteRange(begin, end) {
return this.bytes.subarray(begin, end);
// Creates a new Stream sharing this stream's underlying buffer.
makeSubStream(start, length, dict = null) {
return new Stream(this.bytes.buffer, start, length, dict);
// Stream backed by the bytes of a JavaScript string.
class StringStream extends Stream {
super(stringToBytes(str));
// Zero-length stream, useful as a placeholder.
class NullStream extends Stream {
super(new Uint8Array(0));
;// CONCATENATED MODULE: ./src/core/chunked_stream.js
// Stream whose backing buffer is filled incrementally in fixed-size chunks
// (range requests and/or progressive download). Accessing bytes in a chunk
// that has not arrived yet throws MissingDataException so the caller can
// schedule a fetch and retry.
// NOTE(review): heavily truncated — several method/getter headers
// (getMissingChunks, numChunksLoaded, isDataLoaded, ensureByte, hasChunk,
// getByte, getBytes, …) and closing braces are missing from this chunk;
// comments below mark the fragments.
class ChunkedStream extends Stream {
constructor(length, chunkSize, manager) {
// Pre-allocate the full-length buffer; chunks are copied in as they arrive.
super(new Uint8Array(length), 0, length, null);
this.chunkSize = chunkSize;
// Set of chunk indices whose bytes have been received.
this._loadedChunks = new Set();
this.numChunks = Math.ceil(length / chunkSize);
// Number of bytes received via progressive (sequential) download.
this.progressiveDataLength = 0;
// Cache of the last chunk index that ensureByte() verified, to skip re-checks.
this.lastSuccessfulEnsureByteChunk = -1;
// fragment: presumably getMissingChunks() — collects indices not yet loaded.
for (let chunk = 0, n = this.numChunks; chunk < n; ++chunk) {
if (!this._loadedChunks.has(chunk)) {
// fragment: presumably the numChunksLoaded getter.
return this._loadedChunks.size;
// fragment: presumably the isDataLoaded getter.
return this.numChunksLoaded === this.numChunks;
// Copies a range-request payload into the buffer and marks its chunks loaded.
onReceiveData(begin, chunk) {
const chunkSize = this.chunkSize;
// Range data must start exactly on a chunk boundary.
if (begin % chunkSize !== 0) {
throw new Error(`Bad begin offset: ${begin}`);
const end = begin + chunk.byteLength;
// …and must end on a boundary unless it is the final (short) chunk.
if (end % chunkSize !== 0 && end !== this.bytes.length) {
throw new Error(`Bad end offset: ${end}`);
this.bytes.set(new Uint8Array(chunk), begin);
const beginChunk = Math.floor(begin / chunkSize);
const endChunk = Math.floor((end - 1) / chunkSize) + 1;
for (let curChunk = beginChunk; curChunk < endChunk; ++curChunk) {
this._loadedChunks.add(curChunk);
// Appends sequential download data and marks every fully-covered chunk loaded.
onReceiveProgressiveData(data) {
let position = this.progressiveDataLength;
const beginChunk = Math.floor(position / this.chunkSize);
this.bytes.set(new Uint8Array(data), position);
position += data.byteLength;
this.progressiveDataLength = position;
// A partially-filled trailing chunk only counts once the stream end is reached.
const endChunk = position >= this.end ? this.numChunks : Math.floor(position / this.chunkSize);
for (let curChunk = beginChunk; curChunk < endChunk; ++curChunk) {
this._loadedChunks.add(curChunk);
// fragment: presumably ensureByte(pos) — header missing from this chunk.
// Bytes below progressiveDataLength are always available.
if (pos < this.progressiveDataLength) {
const chunk = Math.floor(pos / this.chunkSize);
if (chunk > this.numChunks) {
if (chunk === this.lastSuccessfulEnsureByteChunk) {
if (!this._loadedChunks.has(chunk)) {
throw new MissingDataException(pos, pos + 1);
this.lastSuccessfulEnsureByteChunk = chunk;
// Verifies every chunk covering [begin, end) is loaded, else throws
// MissingDataException with the missing byte range.
ensureRange(begin, end) {
if (end <= this.progressiveDataLength) {
const beginChunk = Math.floor(begin / this.chunkSize);
if (beginChunk > this.numChunks) {
const endChunk = Math.min(Math.floor((end - 1) / this.chunkSize) + 1, this.numChunks);
for (let chunk = beginChunk; chunk < endChunk; ++chunk) {
if (!this._loadedChunks.has(chunk)) {
throw new MissingDataException(begin, end);
// Finds the next not-yet-loaded chunk, scanning circularly from beginChunk.
nextEmptyChunk(beginChunk) {
const numChunks = this.numChunks;
for (let i = 0; i < numChunks; ++i) {
const chunk = (beginChunk + i) % numChunks;
if (!this._loadedChunks.has(chunk)) {
// fragment: presumably hasChunk(chunk).
return this._loadedChunks.has(chunk);
// fragment: presumably getByte() — ensures availability before reading.
if (pos >= this.progressiveDataLength) {
return this.bytes[this.pos++];
// fragment: presumably getBytes(length) — ensureRange guards each subarray.
const bytes = this.bytes;
if (strEnd > this.progressiveDataLength) {
this.ensureRange(pos, strEnd);
return bytes.subarray(pos, strEnd);
if (end > this.progressiveDataLength) {
this.ensureRange(pos, end);
return bytes.subarray(pos, end);
getByteRange(begin, end) {
if (end > this.progressiveDataLength) {
this.ensureRange(begin, end);
return this.bytes.subarray(begin, end);
// Creates a sub-stream over [start, start + length); data availability is
// checked lazily via a prototype-chained substream object.
makeSubStream(start, length, dict = null) {
if (start + length > this.progressiveDataLength) {
this.ensureRange(start, start + length);
} else if (start >= this.progressiveDataLength) {
// The substream inherits from this instance so it shares the loaded-chunk
// bookkeeping while carrying its own start/pos/end.
function ChunkedStreamSubstream() {}
ChunkedStreamSubstream.prototype = Object.create(this);
ChunkedStreamSubstream.prototype.getMissingChunks = function () {
const chunkSize = this.chunkSize;
const beginChunk = Math.floor(this.start / chunkSize);
const endChunk = Math.floor((this.end - 1) / chunkSize) + 1;
const missingChunks = [];
for (let chunk = beginChunk; chunk < endChunk; ++chunk) {
if (!this._loadedChunks.has(chunk)) {
missingChunks.push(chunk);
// The substream is "loaded" when its own covered chunks are all present.
Object.defineProperty(ChunkedStreamSubstream.prototype, "isDataLoaded", {
if (this.numChunksLoaded === this.numChunks) {
return this.getMissingChunks().length === 0;
const subStream = new ChunkedStreamSubstream();
subStream.pos = subStream.start = start;
// Same falsy-fallback idiom as Stream: no length means "to the end".
subStream.end = start + length || this.end;
// Coordinates chunked loading of a ChunkedStream over a network transport:
// tracks which chunks each request needs, groups contiguous chunks into
// single range requests, and resolves per-request promises as data arrives.
// NOTE(review): heavily truncated — the ends of sendRequest/requestAllChunks,
// the _requestChunks header and several branches are missing from this chunk;
// comments below mark the fragments.
class ChunkedStreamManager {
constructor(pdfNetworkStream, args) {
this.length = args.length;
this.chunkSize = args.rangeChunkSize;
this.stream = new ChunkedStream(this.length, this.chunkSize, this);
this.pdfNetworkStream = pdfNetworkStream;
this.disableAutoFetch = args.disableAutoFetch;
this.msgHandler = args.msgHandler;
// requestId -> Set of chunk indices that request is still waiting for.
this._chunksNeededByRequest = new Map();
// chunk index -> array of requestIds waiting on that chunk.
this._requestsByChunk = new Map();
// requestId -> {promise, resolve, reject} capability.
this._promisesByRequest = new Map();
this.progressiveDataLength = 0;
// Resolved once the entire stream has been loaded.
this._loadedStreamCapability = Promise.withResolvers();
// Fetches the byte range [begin, end) through the network stream's
// range reader, reading incrementally via readChunk (not fully visible).
sendRequest(begin, end) {
const rangeReader = this.pdfNetworkStream.getRangeReader(begin, end);
if (!rangeReader.isStreamingSupported) {
rangeReader.onProgress = this.onProgress.bind(this);
return new Promise((resolve, reject) => {
// fragment: on completion the accumulated chunks are flattened into one buffer.
const chunkData = arrayBuffersToBytes(chunks);
loaded += value.byteLength;
if (rangeReader.isStreamingSupported) {
rangeReader.read().then(readChunk, reject);
// Kick off the first read; subsequent reads are chained inside readChunk.
rangeReader.read().then(readChunk, reject);
// Requests every chunk that is still missing; resolves when the whole
// stream is loaded. (The noFetch branch is not visible in this chunk.)
requestAllChunks(noFetch = false) {
const missingChunks = this.stream.getMissingChunks();
this._requestChunks(missingChunks);
return this._loadedStreamCapability.promise;
// fragment: presumably _requestChunks(chunks) — header missing from this chunk.
const requestId = this.currRequestId++;
const chunksNeeded = new Set();
this._chunksNeededByRequest.set(requestId, chunksNeeded);
// Only chunks not already present in the stream need to be fetched.
for (const chunk of chunks) {
if (!this.stream.hasChunk(chunk)) {
// Everything already loaded — nothing to wait for.
if (chunksNeeded.size === 0) {
return Promise.resolve();
const capability = Promise.withResolvers();
this._promisesByRequest.set(requestId, capability);
const chunksToRequest = [];
for (const chunk of chunksNeeded) {
// A chunk already being fetched by another request is only subscribed to,
// not re-requested (the requestIds initialization branch is not visible).
let requestIds = this._requestsByChunk.get(chunk);
this._requestsByChunk.set(chunk, requestIds);
chunksToRequest.push(chunk);
requestIds.push(requestId);
if (chunksToRequest.length > 0) {
// Merge adjacent chunk indices into as few range requests as possible.
const groupedChunksToRequest = this.groupChunks(chunksToRequest);
for (const groupedChunk of groupedChunksToRequest) {
const begin = groupedChunk.beginChunk * this.chunkSize;
// Clamp the final request to the actual stream length.
const end = Math.min(groupedChunk.endChunk * this.chunkSize, this.length);
this.sendRequest(begin, end).catch(capability.reject);