first commit
Some checks failed
Types tests / Test (lts/*) (push) Has been cancelled
Lint / Lint (lts/*) (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
CI / Test (20) (push) Has been cancelled
CI / Test (22) (push) Has been cancelled
CI / Test (24) (push) Has been cancelled
This commit is contained in:
5261 test/unit/annotation_spec.js Normal file
File diff suppressed because it is too large
112 test/unit/annotation_storage_spec.js Normal file
@@ -0,0 +1,112 @@
/* Copyright 2020 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { AnnotationStorage } from "../../src/display/annotation_storage.js";

describe("AnnotationStorage", function () {
  describe("GetOrDefaultValue", function () {
    it("should get and set a new value in the annotation storage", function () {
      const annotationStorage = new AnnotationStorage();
      let value = annotationStorage.getValue("123A", {
        value: "hello world",
      }).value;
      expect(value).toEqual("hello world");

      annotationStorage.setValue("123A", {
        value: "hello world",
      });

      // the second argument is the default value to use
      // if the key isn't in the storage
      value = annotationStorage.getValue("123A", {
        value: "an other string",
      }).value;
      expect(value).toEqual("hello world");
    });

    it("should get set values and default ones in the annotation storage", function () {
      const annotationStorage = new AnnotationStorage();
      annotationStorage.setValue("123A", {
        value: "hello world",
        hello: "world",
      });

      const result = annotationStorage.getValue("123A", {
        value: "an other string",
        world: "hello",
      });
      expect(result).toEqual({
        value: "hello world",
        hello: "world",
        world: "hello",
      });
    });
  });

  describe("SetValue", function () {
    it("should set a new value in the annotation storage", function () {
      const annotationStorage = new AnnotationStorage();
      annotationStorage.setValue("123A", { value: "an other string" });
      const { value } = annotationStorage.getRawValue("123A");
      expect(value).toEqual("an other string");
    });

    it("should call onSetModified() if value is changed", function () {
      const annotationStorage = new AnnotationStorage();
      let called = false;
      const callback = function () {
        called = true;
      };
      annotationStorage.onSetModified = callback;

      annotationStorage.setValue("asdf", { value: "original" });
      expect(called).toBe(true);

      // changing value
      annotationStorage.setValue("asdf", { value: "modified" });
      expect(called).toBe(true);

      // not changing value
      called = false;
      annotationStorage.setValue("asdf", { value: "modified" });
      expect(called).toBe(false);
    });
  });

  describe("ResetModified", function () {
    it("should call onResetModified() if set", function () {
      const annotationStorage = new AnnotationStorage();
      let called = false;
      const callback = function () {
        called = true;
      };
      annotationStorage.onResetModified = callback;
      annotationStorage.setValue("asdf", { value: "original" });
      annotationStorage.resetModified();
      expect(called).toBe(true);
      called = false;

      // not changing value
      annotationStorage.setValue("asdf", { value: "original" });
      annotationStorage.resetModified();
      expect(called).toBe(false);

      // changing value
      annotationStorage.setValue("asdf", { value: "modified" });
      annotationStorage.resetModified();
      expect(called).toBe(true);
    });
  });
});
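A note for readers skimming the diff: the behaviour pinned down above is that `getValue(key, default)` only falls back to the supplied default when the key is absent, and `onSetModified()` only fires when `setValue()` actually changes the stored data. A minimal usage sketch (not part of the commit, field name hypothetical), reusing the same import as the spec:

import { AnnotationStorage } from "../../src/display/annotation_storage.js";

const storage = new AnnotationStorage();
// No entry for "field1" yet, so the supplied default is returned.
let { value } = storage.getValue("field1", { value: "fallback" }); // "fallback"

storage.setValue("field1", { value: "typed text" });
// The stored value now takes precedence over any default passed to getValue().
({ value } = storage.getValue("field1", { value: "fallback" })); // "typed text"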
5305 test/unit/api_spec.js Normal file
File diff suppressed because it is too large
41 test/unit/app_options_spec.js Normal file
@@ -0,0 +1,41 @@
/* Copyright 2024 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { AppOptions, OptionKind } from "../../web/app_options.js";
import { objectSize } from "../../src/shared/util.js";

describe("AppOptions", function () {
  it("checks that getAll returns data, for every OptionKind", function () {
    const KIND_NAMES = ["BROWSER", "VIEWER", "API", "WORKER", "PREFERENCE"];

    for (const name of KIND_NAMES) {
      const kind = OptionKind[name];
      expect(typeof kind).toEqual("number");

      const options = AppOptions.getAll(kind);
      expect(objectSize(options)).toBeGreaterThan(0);
    }
  });

  it('checks that the number of "PREFERENCE" options does *not* exceed the maximum in mozilla-central', function () {
    // If the following constant is updated then you *MUST* make the same change
    // in mozilla-central as well to ensure that preference-fetching works; see
    // https://searchfox.org/mozilla-central/source/toolkit/components/pdfjs/content/PdfStreamConverter.sys.mjs
    const MAX_NUMBER_OF_PREFS = 50;

    const options = AppOptions.getAll(OptionKind.PREFERENCE);
    expect(objectSize(options)).toBeLessThanOrEqual(MAX_NUMBER_OF_PREFS);
  });
});
207 test/unit/autolinker_spec.js Normal file
@@ -0,0 +1,207 @@
/* Copyright 2025 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { Autolinker } from "../../web/autolinker.js";

function testLinks(links) {
  const matches = Autolinker.findLinks(links.map(link => link[0]).join("\n"));
  expect(matches.length).toEqual(links.length);
  for (let i = 0; i < links.length; i++) {
    expect(matches[i].url).toEqual(links[i][1]);
  }
}

describe("autolinker", function () {
  it("should correctly find URLs", function () {
    const [matched] = Autolinker.findLinks("http://www.example.com");
    expect(matched.url).toEqual("http://www.example.com/");
  });

  it("should correctly find simple valid URLs", function () {
    testLinks([
      [
        "http://subdomain.example.com/path/to/page?query=param",
        "http://subdomain.example.com/path/to/page?query=param",
      ],
      [
        "www.example.com/path/to/resource",
        "http://www.example.com/path/to/resource",
      ],
      [
        "http://example.com/path?query=value#fragment",
        "http://example.com/path?query=value#fragment",
      ],
    ]);
  });

  it("should correctly find emails", function () {
    testLinks([
      ["mailto:username@example.com", "mailto:username@example.com"],
      [
        "mailto:someone@subdomain.example.com",
        "mailto:someone@subdomain.example.com",
      ],
      ["peter@abc.de", "mailto:peter@abc.de"],
      ["red.teddy.b@abc.com", "mailto:red.teddy.b@abc.com"],
      [
        "abc_@gmail.com", // '_' is ok before '@'.
        "mailto:abc_@gmail.com",
      ],
      [
        "dummy-hi@gmail.com", // '-' is ok in user name.
        "mailto:dummy-hi@gmail.com",
      ],
      [
        "a..df@gmail.com", // Stop at consecutive '.'.
        "mailto:a..df@gmail.com",
      ],
      [
        ".john@yahoo.com", // Remove heading '.'.
        "mailto:john@yahoo.com",
      ],
      [
        "abc@xyz.org?/", // Trim ending invalid chars.
        "mailto:abc@xyz.org",
      ],
      [
        "fan{abc@xyz.org", // Trim beginning invalid chars.
        "mailto:abc@xyz.org",
      ],
      [
        "fan@g.com..", // Trim the ending periods.
        "mailto:fan@g.com",
      ],
      [
        "CAP.cap@Gmail.Com", // Keep the original case.
        "mailto:CAP.cap@Gmail.Com",
      ],
      ["partl@mail.boku.ac.at", "mailto:partl@mail.boku.ac.at"],
      ["Irene.Hyna@bmwf.ac.at", "mailto:Irene.Hyna@bmwf.ac.at"],
      ["<hi@foo.bar.baz>", "mailto:hi@foo.bar.baz"],
    ]);
  });

  it("should correctly handle complex or edge cases", function () {
    testLinks([
      [
        "https://example.com/path/to/page?query=param&another=val#section",
        "https://example.com/path/to/page?query=param&another=val#section",
      ],
      [
        "www.example.com/resource/(parentheses)-allowed/",
        "http://www.example.com/resource/(parentheses)-allowed/",
      ],
      [
        "http://example.com/path_with_underscores",
        "http://example.com/path_with_underscores",
      ],
      [
        "http://www.example.com:8080/port/test",
        "http://www.example.com:8080/port/test",
      ],
      [
        "https://example.com/encoded%20spaces%20in%20path",
        "https://example.com/encoded%20spaces%20in%20path",
      ],
      ["mailto:hello+world@example.com", "mailto:hello+world@example.com"],
      ["www.a.com/#a=@?q=rr&r=y", "http://www.a.com/#a=@?q=rr&r=y"],
      ["http://a.com/1/2/3/4\\5\\6", "http://a.com/1/2/3/4/5/6"],
      ["http://www.example.com/foo;bar", "http://www.example.com/foo;bar"],
      // ["www.abc.com/#%%^&&*(", "http://www.abc.com/#%%^&&*("], TODO: Patch the regex to accept the whole URL.
    ]);
  });

  it("shouldn't find false positives", function () {
    const matches = Autolinker.findLinks(
      [
        "not a valid URL",
        "htp://misspelled-protocol.com",
        "example.com (missing protocol)",
        "https://[::1] (IPv6 loopback)",
        "http:// (just protocol)",
        "", // Blank.
        "http", // No colon.
        "www.", // Missing domain.
        "https-and-www", // Dash not colon.
        "http:/abc.com", // Missing slash.
        "http://((()),", // Only invalid chars in host name.
        "ftp://example.com", // Ftp scheme is not supported.
        "http:example.com", // Missing slashes.
        "http//[example.com", // Invalid IPv6 address.
        "http//[00:00:00:00:00:00", // Invalid IPv6 address.
        "http//[]", // Empty IPv6 address.
        "abc.example.com", // URL without scheme.
        "JD?M$0QP)lKn06l1apKDC@\\qJ4B!!(5m+j.7F790m", // Not a valid email.
      ].join("\n")
    );
    expect(matches.length).toEqual(0);
  });

  it("should correctly find links among mixed content", function () {
    const matches = Autolinker.findLinks(
      [
        "Here's a URL: https://example.com and an email: mailto:test@example.com",
        "www.example.com and more text",
        "Check this: http://example.com/path?query=1 and this mailto:info@domain.com",
      ].join("\n")
    );
    expect(matches.length).toEqual(5);
    expect(matches[0].url).toEqual("https://example.com/");
    expect(matches[1].url).toEqual("mailto:test@example.com");
    expect(matches[2].url).toEqual("http://www.example.com/");
    expect(matches[3].url).toEqual("http://example.com/path?query=1");
    expect(matches[4].url).toEqual("mailto:info@domain.com");
  });

  it("should correctly work with special characters", function () {
    testLinks([
      [
        "https://example.com/path/to/page?query=value&symbol=£",
        "https://example.com/path/to/page?query=value&symbol=%C2%A3",
      ],
      [
        "mailto:user.name+alias@example-domain.com",
        "mailto:user.name+alias@example-domain.com",
      ],
      ["http://example.com/@user", "http://example.com/@user"],
      ["https://example.com/path#@anchor", "https://example.com/path#@anchor"],
      ["www.测试.net", "http://www.xn--0zwm56d.net/"],
      ["www.测试.net;", "http://www.xn--0zwm56d.net/"],
      // [ "www.测试。net。", "http://www.xn--0zwm56d.net/" ] TODO: Patch `createValidAbsoluteUrl` to accept this.
    ]);
  });

  it("should correctly find links with dashes and newlines between numbers", function () {
    const matches = Autolinker.findLinks("http://abcd.efg/test1-\n2/test.html");
    expect(matches.length).toEqual(1);
    expect(matches[0].url).toEqual("http://abcd.efg/test1-2/test.html");
  });

  it("should correctly identify emails with special prefixes", function () {
    testLinks([
      ["wwwtest@email.com", "mailto:wwwtest@email.com"],
      ["httptest@email.com", "mailto:httptest@email.com"],
    ]);
  });

  it("shouldn't remove the dash when it's an the end of a line (bug 1974112)", function () {
    testLinks([
      [
        "https://github.com/pypi/linehaul-cloud-\nfunction",
        "https://github.com/pypi/linehaul-cloud-function",
      ],
    ]);
  });
});
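As the expectations above show, `Autolinker.findLinks(text)` scans plain text and returns one match per detected link, with a normalized absolute URL in its `url` property (bare `www.` hosts gain `http://`, bare e-mail addresses gain `mailto:`). A small illustrative sketch (hypothetical input, not part of the commit):

import { Autolinker } from "../../web/autolinker.js";

const matches = Autolinker.findLinks(
  "See www.example.com or mail someone@example.com"
);
// Per the behaviour tested above, one would expect:
//   matches[0].url === "http://www.example.com/"
//   matches[1].url === "mailto:someone@example.com"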
71 test/unit/bidi_spec.js Normal file
@@ -0,0 +1,71 @@
/* Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { bidi } from "../../src/core/bidi.js";

describe("bidi", function () {
  it(
    "should mark text as LTR if there's only LTR-characters, " +
      "when the string is very short",
    function () {
      const str = "foo";
      const bidiText = bidi(str, -1, false);

      expect(bidiText.str).toEqual("foo");
      expect(bidiText.dir).toEqual("ltr");
    }
  );

  it("should mark text as LTR if there's only LTR-characters", function () {
    const str = "Lorem ipsum dolor sit amet, consectetur adipisicing elit.";
    const bidiText = bidi(str, -1, false);

    expect(bidiText.str).toEqual(
      "Lorem ipsum dolor sit amet, consectetur adipisicing elit."
    );
    expect(bidiText.dir).toEqual("ltr");
  });

  it("should mark text as RTL if more than 30% of text is RTL", function () {
    // 33% of test text are RTL characters
    const test = "\u0645\u0635\u0631 Egypt";
    const result = "Egypt \u0631\u0635\u0645";
    const bidiText = bidi(test, -1, false);

    expect(bidiText.str).toEqual(result);
    expect(bidiText.dir).toEqual("rtl");
  });

  it("should mark text as LTR if less than 30% of text is RTL", function () {
    const test = "Egypt is known as \u0645\u0635\u0631 in Arabic.";
    const result = "Egypt is known as \u0631\u0635\u0645 in Arabic.";
    const bidiText = bidi(test, -1, false);

    expect(bidiText.str).toEqual(result);
    expect(bidiText.dir).toEqual("ltr");
  });

  it(
    "should mark text as RTL if less than 30% of text is RTL, " +
      "when the string is very short (issue 11656)",
    function () {
      const str = "()\u05d1("; // 25% of the string is RTL characters.
      const bidiText = bidi(str, -1, false);

      expect(bidiText.str).toEqual("(\u05d1)(");
      expect(bidiText.dir).toEqual("rtl");
    }
  );
});
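The rule these cases encode: the paragraph direction flips to "rtl" once more than roughly 30% of the characters are RTL, with a separate code path for very short strings (issue 11656). The sketch below only illustrates that threshold; it is not the actual `bidi()` implementation:

// Rough ratio check mirroring the ~30% threshold in the test descriptions.
function looksRtl(str) {
  const rtlChars = str.match(/[\u0591-\u07ff\ufb1d-\ufdff\ufe70-\ufefc]/g) || [];
  return rtlChars.length / str.length > 0.3;
}

looksRtl("\u0645\u0635\u0631 Egypt"); // true (3 of 9 characters, ~33%)
looksRtl("Egypt is known as \u0645\u0635\u0631 in Arabic."); // false (~9%)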
164 test/unit/bin_font_info_spec.js Normal file
@@ -0,0 +1,164 @@
/* Copyright 2025 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {
  CssFontInfo,
  FontInfo,
  SystemFontInfo,
} from "../../src/shared/obj-bin-transform.js";

const cssFontInfo = {
  fontFamily: "Sample Family",
  fontWeight: "not a number",
  italicAngle: "angle",
  uselessProp: "doesn't matter",
};

const systemFontInfo = {
  guessFallback: false,
  css: "some string",
  loadedName: "another string",
  baseFontName: "base name",
  src: "source",
  style: {
    style: "normal",
    weight: "400",
    uselessProp: "doesn't matter",
  },
  uselessProp: "doesn't matter",
};

const fontInfo = {
  black: true,
  bold: true,
  disableFontFace: true,
  fontExtraProperties: true,
  isInvalidPDFjsFont: true,
  isType3Font: true,
  italic: true,
  missingFile: true,
  remeasure: true,
  vertical: true,
  ascent: 1,
  defaultWidth: 1,
  descent: 1,
  bbox: [1, 1, 1, 1],
  fontMatrix: [1, 1, 1, 1, 1, 1],
  defaultVMetrics: [1, 1, 1],
  fallbackName: "string",
  loadedName: "string",
  mimetype: "string",
  name: "string",
  data: new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
  uselessProp: "something",
};

describe("font data serialization and deserialization", function () {
  describe("CssFontInfo", function () {
    it("must roundtrip correctly for CssFontInfo", function () {
      const encoder = new TextEncoder();
      let sizeEstimate = 0;
      for (const string of ["Sample Family", "not a number", "angle"]) {
        sizeEstimate += 4 + encoder.encode(string).length;
      }
      const buffer = CssFontInfo.write(cssFontInfo);
      expect(buffer.byteLength).toEqual(sizeEstimate);
      const deserialized = new CssFontInfo(buffer);
      expect(deserialized.fontFamily).toEqual("Sample Family");
      expect(deserialized.fontWeight).toEqual("not a number");
      expect(deserialized.italicAngle).toEqual("angle");
      expect(deserialized.uselessProp).toBeUndefined();
    });
  });

  describe("SystemFontInfo", function () {
    it("must roundtrip correctly for SystemFontInfo", function () {
      const encoder = new TextEncoder();
      let sizeEstimate = 1 + 4;
      for (const string of [
        "some string",
        "another string",
        "base name",
        "source",
        "normal",
        "400",
      ]) {
        sizeEstimate += 4 + encoder.encode(string).length;
      }
      const buffer = SystemFontInfo.write(systemFontInfo);
      expect(buffer.byteLength).toEqual(sizeEstimate);
      const deserialized = new SystemFontInfo(buffer);
      expect(deserialized.guessFallback).toEqual(false);
      expect(deserialized.css).toEqual("some string");
      expect(deserialized.loadedName).toEqual("another string");
      expect(deserialized.baseFontName).toEqual("base name");
      expect(deserialized.src).toEqual("source");
      expect(deserialized.style.style).toEqual("normal");
      expect(deserialized.style.weight).toEqual("400");
      expect(deserialized.style.uselessProp).toBeUndefined();
      expect(deserialized.uselessProp).toBeUndefined();
    });
  });

  describe("FontInfo", function () {
    it("must roundtrip correctly for FontInfo", function () {
      let sizeEstimate = 92; // fixed offset until the strings
      const encoder = new TextEncoder();
      sizeEstimate += 4 + 4 * (4 + encoder.encode("string").length);
      sizeEstimate += 4 + 4; // cssFontInfo and systemFontInfo
      sizeEstimate += 4 + fontInfo.data.length;
      const buffer = FontInfo.write(fontInfo);
      expect(buffer.byteLength).toEqual(sizeEstimate);
      const deserialized = new FontInfo({ data: buffer });
      expect(deserialized.black).toEqual(true);
      expect(deserialized.bold).toEqual(true);
      expect(deserialized.disableFontFace).toEqual(true);
      expect(deserialized.fontExtraProperties).toEqual(true);
      expect(deserialized.isInvalidPDFjsFont).toEqual(true);
      expect(deserialized.isType3Font).toEqual(true);
      expect(deserialized.italic).toEqual(true);
      expect(deserialized.missingFile).toEqual(true);
      expect(deserialized.remeasure).toEqual(true);
      expect(deserialized.vertical).toEqual(true);
      expect(deserialized.ascent).toEqual(1);
      expect(deserialized.defaultWidth).toEqual(1);
      expect(deserialized.descent).toEqual(1);
      expect(deserialized.bbox).toEqual([1, 1, 1, 1]);
      expect(deserialized.fontMatrix).toEqual([1, 1, 1, 1, 1, 1]);
      expect(deserialized.defaultVMetrics).toEqual([1, 1, 1]);
      expect(deserialized.fallbackName).toEqual("string");
      expect(deserialized.loadedName).toEqual("string");
      expect(deserialized.mimetype).toEqual("string");
      expect(deserialized.name).toEqual("string");
      expect(Array.from(deserialized.data)).toEqual([
        1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
      ]);
      expect(deserialized.uselessProp).toBeUndefined();
      expect(deserialized.cssFontInfo).toBeNull();
      expect(deserialized.systemFontInfo).toBeNull();
    });

    it("nesting should work as expected", function () {
      const buffer = FontInfo.write({
        ...fontInfo,
        cssFontInfo,
        systemFontInfo,
      });
      const deserialized = new FontInfo({ data: buffer });
      expect(deserialized.cssFontInfo.fontWeight).toEqual("not a number");
      expect(deserialized.systemFontInfo.src).toEqual("source");
    });
  });
});
111 test/unit/canvas_factory_spec.js Normal file
@@ -0,0 +1,111 @@
/* Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { DOMCanvasFactory } from "../../src/display/canvas_factory.js";
import { isNodeJS } from "../../src/shared/util.js";

describe("canvas_factory", function () {
  describe("DOMCanvasFactory", function () {
    let canvasFactory;

    beforeAll(function () {
      canvasFactory = new DOMCanvasFactory({});
    });

    afterAll(function () {
      canvasFactory = null;
    });

    it("`create` should throw an error if the dimensions are invalid", function () {
      // Invalid width.
      expect(function () {
        return canvasFactory.create(-1, 1);
      }).toThrow(new Error("Invalid canvas size"));

      // Invalid height.
      expect(function () {
        return canvasFactory.create(1, -1);
      }).toThrow(new Error("Invalid canvas size"));
    });

    it("`create` should return a canvas if the dimensions are valid", function () {
      if (isNodeJS) {
        pending("Document is not supported in Node.js.");
      }

      const { canvas, context } = canvasFactory.create(20, 40);
      expect(canvas instanceof HTMLCanvasElement).toBe(true);
      expect(context instanceof CanvasRenderingContext2D).toBe(true);
      expect(canvas.width).toBe(20);
      expect(canvas.height).toBe(40);
    });

    it("`reset` should throw an error if no canvas is provided", function () {
      const canvasAndContext = { canvas: null, context: null };

      expect(function () {
        return canvasFactory.reset(canvasAndContext, 20, 40);
      }).toThrow(new Error("Canvas is not specified"));
    });

    it("`reset` should throw an error if the dimensions are invalid", function () {
      const canvasAndContext = { canvas: "foo", context: "bar" };

      // Invalid width.
      expect(function () {
        return canvasFactory.reset(canvasAndContext, -1, 1);
      }).toThrow(new Error("Invalid canvas size"));

      // Invalid height.
      expect(function () {
        return canvasFactory.reset(canvasAndContext, 1, -1);
      }).toThrow(new Error("Invalid canvas size"));
    });

    it("`reset` should alter the canvas/context if the dimensions are valid", function () {
      if (isNodeJS) {
        pending("Document is not supported in Node.js.");
      }

      const canvasAndContext = canvasFactory.create(20, 40);
      canvasFactory.reset(canvasAndContext, 60, 80);

      const { canvas, context } = canvasAndContext;
      expect(canvas instanceof HTMLCanvasElement).toBe(true);
      expect(context instanceof CanvasRenderingContext2D).toBe(true);
      expect(canvas.width).toBe(60);
      expect(canvas.height).toBe(80);
    });

    it("`destroy` should throw an error if no canvas is provided", function () {
      expect(function () {
        return canvasFactory.destroy({});
      }).toThrow(new Error("Canvas is not specified"));
    });

    it("`destroy` should clear the canvas/context", function () {
      if (isNodeJS) {
        pending("Document is not supported in Node.js.");
      }

      const canvasAndContext = canvasFactory.create(20, 40);
      canvasFactory.destroy(canvasAndContext);

      const { canvas, context } = canvasAndContext;
      expect(canvas).toBe(null);
      expect(context).toBe(null);
    });
  });
});
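The factory lifecycle exercised above, gathered in one place: `create()` returns a `{ canvas, context }` pair, `reset()` resizes that pair in place, and `destroy()` nulls it out; all three validate their arguments first. A minimal sketch (browser-only, since Node.js lacks `document`), assuming the same import as the spec:

import { DOMCanvasFactory } from "../../src/display/canvas_factory.js";

const factory = new DOMCanvasFactory({});
const canvasAndContext = factory.create(20, 40); // { canvas, context }
factory.reset(canvasAndContext, 60, 80); // resizes the existing canvas
factory.destroy(canvasAndContext); // canvas and context become null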
504 test/unit/cff_parser_spec.js Normal file
@@ -0,0 +1,504 @@
/* Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {
  CFFCharset,
  CFFCompiler,
  CFFFDSelect,
  CFFParser,
  CFFStrings,
} from "../../src/core/cff_parser.js";
import { SEAC_ANALYSIS_ENABLED } from "../../src/core/fonts_utils.js";
import { Stream } from "../../src/core/stream.js";

describe("CFFParser", function () {
  function createWithNullProto(obj) {
    const result = Object.create(null);
    for (const i in obj) {
      result[i] = obj[i];
    }
    return result;
  }

  // Stub that returns `0` for any privateDict key.
  const privateDictStub = {
    getByName(name) {
      return 0;
    },
  };

  let fontData, parser, cff;

  beforeAll(function () {
    // This example font comes from the CFF spec:
    // http://www.adobe.com/content/dam/Adobe/en/devnet/font/pdfs/5176.CFF.pdf
    const exampleFont =
      "0100040100010101134142434445462b" +
      "54696d65732d526f6d616e000101011f" +
      "f81b00f81c02f81d03f819041c6f000d" +
      "fb3cfb6efa7cfa1605e911b8f1120003" +
      "01010813183030312e30303754696d65" +
      "7320526f6d616e54696d657300000002" +
      "010102030e0e7d99f92a99fb7695f773" +
      "8b06f79a93fc7c8c077d99f85695f75e" +
      "9908fb6e8cf87393f7108b09a70adf0b" +
      "f78e14";
    const fontArr = [];
    for (let i = 0, ii = exampleFont.length; i < ii; i += 2) {
      const hex = exampleFont.substring(i, i + 2);
      fontArr.push(parseInt(hex, 16));
    }
    fontData = new Stream(fontArr);
  });

  afterAll(function () {
    fontData = null;
  });

  beforeEach(function () {
    parser = new CFFParser(fontData, {}, SEAC_ANALYSIS_ENABLED);
    cff = parser.parse();
  });

  afterEach(function () {
    parser = cff = null;
  });

  it("parses header", function () {
    const header = cff.header;
    expect(header.major).toEqual(1);
    expect(header.minor).toEqual(0);
    expect(header.hdrSize).toEqual(4);
    expect(header.offSize).toEqual(1);
  });

  it("parses name index", function () {
    const names = cff.names;
    expect(names.length).toEqual(1);
    expect(names[0]).toEqual("ABCDEF+Times-Roman");
  });

  it("parses string index", function () {
    const strings = cff.strings;
    expect(strings.count).toEqual(3);
    expect(strings.get(0)).toEqual(".notdef");
    expect(strings.get(391)).toEqual("001.007");
  });

  it("parses top dict", function () {
    const topDict = cff.topDict;
    // 391 version 392 FullName 393 FamilyName 389 Weight 28416 UniqueID
    // -168 -218 1000 898 FontBBox 94 CharStrings 45 102 Private
    expect(topDict.getByName("version")).toEqual(391);
    expect(topDict.getByName("FullName")).toEqual(392);
    expect(topDict.getByName("FamilyName")).toEqual(393);
    expect(topDict.getByName("Weight")).toEqual(389);
    expect(topDict.getByName("UniqueID")).toEqual(28416);
    expect(topDict.getByName("FontBBox")).toEqual([-168, -218, 1000, 898]);
    expect(topDict.getByName("CharStrings")).toEqual(94);
    expect(topDict.getByName("Private")).toEqual([45, 102]);
  });

  it("refuses to add topDict key with invalid value (bug 1068432)", function () {
    const topDict = cff.topDict;
    const defaultValue = topDict.getByName("UnderlinePosition");

    topDict.setByKey(/* [12, 3] = */ 3075, [NaN]);
    expect(topDict.getByName("UnderlinePosition")).toEqual(defaultValue);
  });

  it(
    "ignores reserved commands in parseDict, and refuses to add privateDict " +
      "keys with invalid values (bug 1308536)",
    function () {
      const bytes = new Uint8Array([
        64, 39, 31, 30, 252, 114, 137, 115, 79, 30, 197, 119, 2, 99, 127, 6,
      ]);
      parser.bytes = bytes;
      const topDict = cff.topDict;
      topDict.setByName("Private", [bytes.length, 0]);

      const parsePrivateDict = function () {
        parser.parsePrivateDict(topDict);
      };
      expect(parsePrivateDict).not.toThrow();

      const privateDict = topDict.privateDict;
      expect(privateDict.getByName("BlueValues")).toBeNull();
    }
  );

  it("parses a CharString having cntrmask", function () {
    // prettier-ignore
    const bytes = new Uint8Array([
      0, 1, // count
      1, // offsetSize
      0, // offset[0]
      38, // end
      149, 149, 149, 149, 149, 149, 149, 149,
      149, 149, 149, 149, 149, 149, 149, 149,
      1, // hstem
      149, 149, 149, 149, 149, 149, 149, 149,
      149, 149, 149, 149, 149, 149, 149, 149,
      3, // vstem
      20, // cntrmask
      22, 22, // fail if misparsed as hmoveto
      14, // endchar
    ]);
    parser.bytes = bytes;
    const charStringsIndex = parser.parseIndex(0).obj;
    const charStrings = parser.parseCharStrings({
      charStrings: charStringsIndex,
      privateDict: privateDictStub,
    }).charStrings;
    expect(charStrings.count).toEqual(1);
    // shouldn't be sanitized
    expect(charStrings.get(0).length).toEqual(38);
  });

  it("parses a CharString endchar with 4 args w/seac enabled", function () {
    const cffParser = new CFFParser(
      fontData,
      {},
      /* seacAnalysisEnabled = */ true
    );
    cffParser.parse(); // cff

    // prettier-ignore
    const bytes = new Uint8Array([
      0, 1, // count
      1, // offsetSize
      0, // offset[0]
      237, 247, 22, 247, 72, 204, 247, 86, 14,
    ]);
    cffParser.bytes = bytes;
    const charStringsIndex = cffParser.parseIndex(0).obj;
    const result = cffParser.parseCharStrings({
      charStrings: charStringsIndex,
      privateDict: privateDictStub,
    });
    expect(result.charStrings.count).toEqual(1);
    expect(result.charStrings.get(0).length).toEqual(1);
    expect(result.seacs.length).toEqual(1);
    expect(result.seacs[0].length).toEqual(4);
    expect(result.seacs[0][0]).toEqual(130);
    expect(result.seacs[0][1]).toEqual(180);
    expect(result.seacs[0][2]).toEqual(65);
    expect(result.seacs[0][3]).toEqual(194);
  });

  it("parses a CharString endchar with 4 args w/seac disabled", function () {
    const cffParser = new CFFParser(
      fontData,
      {},
      /* seacAnalysisEnabled = */ false
    );
    cffParser.parse(); // cff

    // prettier-ignore
    const bytes = new Uint8Array([
      0, 1, // count
      1, // offsetSize
      0, // offset[0]
      237, 247, 22, 247, 72, 204, 247, 86, 14,
    ]);
    cffParser.bytes = bytes;
    const charStringsIndex = cffParser.parseIndex(0).obj;
    const result = cffParser.parseCharStrings({
      charStrings: charStringsIndex,
      privateDict: privateDictStub,
    });
    expect(result.charStrings.count).toEqual(1);
    expect(result.charStrings.get(0).length).toEqual(9);
    expect(result.seacs.length).toEqual(0);
  });

  it("parses a CharString endchar no args", function () {
    // prettier-ignore
    const bytes = new Uint8Array([
      0, 1, // count
      1, // offsetSize
      0, // offset[0]
      14,
    ]);
    parser.bytes = bytes;
    const charStringsIndex = parser.parseIndex(0).obj;
    const result = parser.parseCharStrings({
      charStrings: charStringsIndex,
      privateDict: privateDictStub,
    });
    expect(result.charStrings.count).toEqual(1);
    expect(result.charStrings.get(0)[0]).toEqual(14);
    expect(result.seacs.length).toEqual(0);
  });

  it("parses predefined charsets", function () {
    const charset = parser.parseCharsets(0, 0, null, true);
    expect(charset.predefined).toEqual(true);
  });

  it("parses charset format 0", function () {
    // The first three bytes make the offset large enough to skip predefined.
    // prettier-ignore
    const bytes = new Uint8Array([
      0x00, 0x00, 0x00,
      0x00, // format
      0x00, 0x02, // sid/cid
    ]);
    parser.bytes = bytes;
    let charset = parser.parseCharsets(3, 2, new CFFStrings(), false);
    expect(charset.charset[1]).toEqual("exclam");

    // CID font
    charset = parser.parseCharsets(3, 2, new CFFStrings(), true);
    expect(charset.charset[1]).toEqual(2);
  });

  it("parses charset format 1", function () {
    // The first three bytes make the offset large enough to skip predefined.
    // prettier-ignore
    const bytes = new Uint8Array([
      0x00, 0x00, 0x00,
      0x01, // format
      0x00, 0x08, // sid/cid start
      0x01, // sid/cid left
    ]);
    parser.bytes = bytes;
    let charset = parser.parseCharsets(3, 2, new CFFStrings(), false);
    expect(charset.charset).toEqual([".notdef", "quoteright", "parenleft"]);

    // CID font
    charset = parser.parseCharsets(3, 2, new CFFStrings(), true);
    expect(charset.charset).toEqual([0, 8, 9]);
  });

  it("parses charset format 2", function () {
    // format 2 is the same as format 1 but the left is card16
    // The first three bytes make the offset large enough to skip predefined.
    // prettier-ignore
    const bytes = new Uint8Array([
      0x00, 0x00, 0x00,
      0x02, // format
      0x00, 0x08, // sid/cid start
      0x00, 0x01, // sid/cid left
    ]);
    parser.bytes = bytes;
    let charset = parser.parseCharsets(3, 2, new CFFStrings(), false);
    expect(charset.charset).toEqual([".notdef", "quoteright", "parenleft"]);

    // CID font
    charset = parser.parseCharsets(3, 2, new CFFStrings(), true);
    expect(charset.charset).toEqual([0, 8, 9]);
  });

  it("parses encoding format 0", function () {
    // The first two bytes make the offset large enough to skip predefined.
    // prettier-ignore
    const bytes = new Uint8Array([
      0x00, 0x00,
      0x00, // format
      0x01, // count
      0x08, // start
    ]);
    parser.bytes = bytes;
    const encoding = parser.parseEncoding(2, {}, new CFFStrings(), null);
    expect(encoding.encoding).toEqual(createWithNullProto({ 0x8: 1 }));
  });

  it("parses encoding format 1", function () {
    // The first two bytes make the offset large enough to skip predefined.
    // prettier-ignore
    const bytes = new Uint8Array([
      0x00, 0x00,
      0x01, // format
      0x01, // num ranges
      0x07, // range1 start
      0x01, // range2 left
    ]);
    parser.bytes = bytes;
    const encoding = parser.parseEncoding(2, {}, new CFFStrings(), null);
    expect(encoding.encoding).toEqual(
      createWithNullProto({ 0x7: 0x01, 0x08: 0x02 })
    );
  });

  it("parses fdselect format 0", function () {
    // prettier-ignore
    const bytes = new Uint8Array([
      0x00, // format
      0x00, // gid: 0 fd: 0
      0x01, // gid: 1 fd: 1
    ]);
    parser.bytes = bytes.slice();
    const fdSelect = parser.parseFDSelect(0, 2);

    expect(fdSelect.fdSelect).toEqual([0, 1]);
    expect(fdSelect.format).toEqual(0);
  });

  it("parses fdselect format 3", function () {
    // prettier-ignore
    const bytes = new Uint8Array([
      0x03, // format
      0x00, 0x02, // range count
      0x00, 0x00, // first gid
      0x09, // font dict 1 id
      0x00, 0x02, // next gid
      0x0a, // font dict 2 id
      0x00, 0x04, // sentinel (last gid)
    ]);
    parser.bytes = bytes.slice();
    const fdSelect = parser.parseFDSelect(0, 4);

    expect(fdSelect.fdSelect).toEqual([9, 9, 0xa, 0xa]);
    expect(fdSelect.format).toEqual(3);
  });

  it("parses invalid fdselect format 3 (bug 1146106)", function () {
    // prettier-ignore
    const bytes = new Uint8Array([
      0x03, // format
      0x00, 0x02, // range count
      0x00, 0x01, // first gid (invalid)
      0x09, // font dict 1 id
      0x00, 0x02, // next gid
      0x0a, // font dict 2 id
      0x00, 0x04, // sentinel (last gid)
    ]);
    parser.bytes = bytes.slice();
    const fdSelect = parser.parseFDSelect(0, 4);

    expect(fdSelect.fdSelect).toEqual([9, 9, 0xa, 0xa]);
    expect(fdSelect.format).toEqual(3);
  });

  // TODO fdArray
});

describe("CFFCompiler", function () {
  function testParser(bytes) {
    bytes = new Uint8Array(bytes);
    return new CFFParser(
      {
        getBytes: () => bytes,
      },
      {},
      SEAC_ANALYSIS_ENABLED
    );
  }

  it("encodes integers", function () {
    const c = new CFFCompiler();
    // all the examples from the spec
    expect(c.encodeInteger(0)).toEqual([0x8b]);
    expect(c.encodeInteger(100)).toEqual([0xef]);
    expect(c.encodeInteger(-100)).toEqual([0x27]);
    expect(c.encodeInteger(1000)).toEqual([0xfa, 0x7c]);
    expect(c.encodeInteger(-1000)).toEqual([0xfe, 0x7c]);
    expect(c.encodeInteger(10000)).toEqual([0x1c, 0x27, 0x10]);
    expect(c.encodeInteger(-10000)).toEqual([0x1c, 0xd8, 0xf0]);
    expect(c.encodeInteger(100000)).toEqual([0x1d, 0x00, 0x01, 0x86, 0xa0]);
    expect(c.encodeInteger(-100000)).toEqual([0x1d, 0xff, 0xfe, 0x79, 0x60]);
  });

  it("encodes floats", function () {
    const c = new CFFCompiler();
    expect(c.encodeFloat(-2.25)).toEqual([0x1e, 0xe2, 0xa2, 0x5f]);
    expect(c.encodeFloat(5e-11)).toEqual([0x1e, 0x5c, 0x11, 0xff]);
  });

  it("sanitizes name index", function () {
    const c = new CFFCompiler();
    let nameIndexCompiled = c.compileNameIndex(["[a"]);
    let parser = testParser(nameIndexCompiled);
    let nameIndex = parser.parseIndex(0);
    let names = parser.parseNameIndex(nameIndex.obj);
    expect(names).toEqual(["_a"]);

    let longName = "";
    for (let i = 0; i < 129; i++) {
      longName += "_";
    }
    nameIndexCompiled = c.compileNameIndex([longName]);
    parser = testParser(nameIndexCompiled);
    nameIndex = parser.parseIndex(0);
    names = parser.parseNameIndex(nameIndex.obj);
    expect(names[0].length).toEqual(127);
  });

  it("compiles fdselect format 0", function () {
    const fdSelect = new CFFFDSelect(0, [3, 2, 1]);
    const c = new CFFCompiler();
    const out = c.compileFDSelect(fdSelect);
    expect(out).toEqual([
      0, // format
      3, // gid: 0 fd 3
      2, // gid: 1 fd 3
      1, // gid: 2 fd 3
    ]);
  });

  it("compiles fdselect format 3", function () {
    const fdSelect = new CFFFDSelect(3, [0, 0, 1, 1]);
    const c = new CFFCompiler();
    const out = c.compileFDSelect(fdSelect);
    expect(out).toEqual([
      3, // format
      0, // nRanges (high)
      2, // nRanges (low)
      0, // range struct 0 - first (high)
      0, // range struct 0 - first (low)
      0, // range struct 0 - fd
      0, // range struct 0 - first (high)
      2, // range struct 0 - first (low)
      1, // range struct 0 - fd
      0, // sentinel (high)
      4, // sentinel (low)
    ]);
  });

  it("compiles fdselect format 3, single range", function () {
    const fdSelect = new CFFFDSelect(3, [0, 0]);
    const c = new CFFCompiler();
    const out = c.compileFDSelect(fdSelect);
    expect(out).toEqual([
      3, // format
      0, // nRanges (high)
      1, // nRanges (low)
      0, // range struct 0 - first (high)
      0, // range struct 0 - first (low)
      0, // range struct 0 - fd
      0, // sentinel (high)
      2, // sentinel (low)
    ]);
  });

  it("compiles charset of CID font", function () {
    const charset = new CFFCharset();
    const c = new CFFCompiler();
    const numGlyphs = 7;
    const out = c.compileCharset(charset, numGlyphs, new CFFStrings(), true);
    // All CID charsets get turned into a simple format 2.
    expect(out).toEqual([
      2, // format
      0, // cid (high)
      1, // cid (low)
      0, // nLeft (high)
      numGlyphs - 2, // nLeft (low)
    ]);
  });

  it("compiles charset of non CID font", function () {
    const charset = new CFFCharset(false, 0, ["space", "exclam"]);
    const c = new CFFCompiler();
    const numGlyphs = 3;
    const out = c.compileCharset(charset, numGlyphs, new CFFStrings(), false);
    // All non-CID fonts use a format 0 charset.
    expect(out).toEqual([
      0, // format
      0, // sid of 'space' (high)
      1, // sid of 'space' (low)
      0, // sid of 'exclam' (high)
      2, // sid of 'exclam' (low)
    ]);
  });

  // TODO a lot more compiler tests
});
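The "encodes integers" expectations follow the DICT operand encoding from Adobe's CFF specification: small values fit in one byte offset by 139, medium values in two bytes, and larger values use 3- or 5-byte forms. A sketch of that scheme, written from the spec rather than copied from `CFFCompiler.encodeInteger` (the helper name below is made up for illustration):

// Sketch of CFF DICT integer-operand encoding; illustration only.
function encodeCffInteger(value) {
  if (value >= -107 && value <= 107) {
    return [value + 139]; // e.g. 0 -> 0x8b, 100 -> 0xef, -100 -> 0x27
  }
  if (value >= 108 && value <= 1131) {
    value -= 108;
    return [(value >> 8) + 247, value & 0xff]; // e.g. 1000 -> [0xfa, 0x7c]
  }
  if (value >= -1131 && value <= -108) {
    value = -value - 108;
    return [(value >> 8) + 251, value & 0xff]; // e.g. -1000 -> [0xfe, 0x7c]
  }
  if (value >= -32768 && value <= 32767) {
    return [0x1c, (value >> 8) & 0xff, value & 0xff]; // 3-byte form
  }
  return [
    0x1d,
    (value >> 24) & 0xff,
    (value >> 16) & 0xff,
    (value >> 8) & 0xff,
    value & 0xff,
  ]; // 5-byte form, e.g. 100000 -> [0x1d, 0x00, 0x01, 0x86, 0xa0]
}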
61 test/unit/clitests.json Normal file
@@ -0,0 +1,61 @@
{
  "spec_dir": "build/lib-legacy/test/unit",

  "helpers": ["clitests_helper.js"],

  "spec_files": [
    "annotation_spec.js",
    "annotation_storage_spec.js",
    "api_spec.js",
    "app_options_spec.js",
    "autolinker_spec.js",
    "bidi_spec.js",
    "bin_font_info_spec.js",
    "canvas_factory_spec.js",
    "cff_parser_spec.js",
    "cmap_spec.js",
    "colorspace_spec.js",
    "core_utils_spec.js",
    "crypto_spec.js",
    "custom_spec.js",
    "default_appearance_spec.js",
    "display_utils_spec.js",
    "document_spec.js",
    "editor_spec.js",
    "encodings_spec.js",
    "evaluator_spec.js",
    "event_utils_spec.js",
    "fetch_stream_spec.js",
    "font_substitutions_spec.js",
    "function_spec.js",
    "message_handler_spec.js",
    "metadata_spec.js",
    "murmurhash3_spec.js",
    "network_utils_spec.js",
    "node_stream_spec.js",
    "parser_spec.js",
    "pdf.image_decoders_spec.js",
    "pdf.worker_spec.js",
    "pdf_find_controller_spec.js",
    "pdf_find_utils_spec.js",
    "pdf_history_spec.js",
    "pdf_spec.js",
    "pdf_viewer.component_spec.js",
    "pdf_viewer_spec.js",
    "primitives_spec.js",
    "stream_spec.js",
    "struct_tree_spec.js",
    "svg_factory_spec.js",
    "text_layer_spec.js",
    "type1_parser_spec.js",
    "ui_utils_spec.js",
    "unicode_spec.js",
    "util_spec.js",
    "writer_spec.js",
    "xfa_formcalc_spec.js",
    "xfa_parser_spec.js",
    "xfa_serialize_data_spec.js",
    "xfa_tohtml_spec.js",
    "xml_spec.js"
  ]
}
34 test/unit/clitests_helper.js Normal file
@@ -0,0 +1,34 @@
/* Copyright 2018 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {
  isNodeJS,
  setVerbosityLevel,
  VerbosityLevel,
} from "../../src/shared/util.js";

// Sets longer timeout, similar to `jasmine-boot.js`.
jasmine.DEFAULT_TIMEOUT_INTERVAL = 30000;

// Ensure that this script only runs in Node.js environments.
if (!isNodeJS) {
  throw new Error(
    "The `gulp unittestcli` command can only be used in Node.js environments."
  );
}

// Reduce the amount of console "spam", by ignoring `info`/`warn` calls,
// when running the unit-tests in Node.js/Travis.
setVerbosityLevel(VerbosityLevel.ERRORS);
254 test/unit/cmap_spec.js Normal file
@@ -0,0 +1,254 @@
/* Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { CMap, CMapFactory, IdentityCMap } from "../../src/core/cmap.js";
import { CMAP_URL, DefaultCMapReaderFactory } from "./test_utils.js";
import { Name } from "../../src/core/primitives.js";
import { StringStream } from "../../src/core/stream.js";

describe("cmap", function () {
  let fetchBuiltInCMap;

  beforeAll(function () {
    // Allow CMap testing in Node.js, e.g. for Travis.
    const CMapReaderFactory = new DefaultCMapReaderFactory({
      baseUrl: CMAP_URL,
    });

    fetchBuiltInCMap = function (name) {
      return CMapReaderFactory.fetch({
        name,
      });
    };
  });

  afterAll(function () {
    fetchBuiltInCMap = null;
  });

  it("parses beginbfchar", async function () {
    // prettier-ignore
    const str = "2 beginbfchar\n" +
                "<03> <00>\n" +
                "<04> <01>\n" +
                "endbfchar\n";
    const stream = new StringStream(str);
    const cmap = await CMapFactory.create({ encoding: stream });
    expect(cmap.lookup(0x03)).toEqual(String.fromCharCode(0x00));
    expect(cmap.lookup(0x04)).toEqual(String.fromCharCode(0x01));
    expect(cmap.lookup(0x05)).toBeUndefined();
  });

  it("parses beginbfrange with range", async function () {
    // prettier-ignore
    const str = "1 beginbfrange\n" +
                "<06> <0B> 0\n" +
                "endbfrange\n";
    const stream = new StringStream(str);
    const cmap = await CMapFactory.create({ encoding: stream });
    expect(cmap.lookup(0x05)).toBeUndefined();
    expect(cmap.lookup(0x06)).toEqual(String.fromCharCode(0x00));
    expect(cmap.lookup(0x0b)).toEqual(String.fromCharCode(0x05));
    expect(cmap.lookup(0x0c)).toBeUndefined();
  });

  it("parses beginbfrange with array", async function () {
    // prettier-ignore
    const str = "1 beginbfrange\n" +
                "<0D> <12> [ 0 1 2 3 4 5 ]\n" +
                "endbfrange\n";
    const stream = new StringStream(str);
    const cmap = await CMapFactory.create({ encoding: stream });
    expect(cmap.lookup(0x0c)).toBeUndefined();
    expect(cmap.lookup(0x0d)).toEqual(0x00);
    expect(cmap.lookup(0x12)).toEqual(0x05);
    expect(cmap.lookup(0x13)).toBeUndefined();
  });

  it("parses begincidchar", async function () {
    // prettier-ignore
    const str = "1 begincidchar\n" +
                "<14> 0\n" +
                "endcidchar\n";
    const stream = new StringStream(str);
    const cmap = await CMapFactory.create({ encoding: stream });
    expect(cmap.lookup(0x14)).toEqual(0x00);
    expect(cmap.lookup(0x15)).toBeUndefined();
  });

  it("parses begincidrange", async function () {
    // prettier-ignore
    const str = "1 begincidrange\n" +
                "<0016> <001B> 0\n" +
                "endcidrange\n";
    const stream = new StringStream(str);
    const cmap = await CMapFactory.create({ encoding: stream });
    expect(cmap.lookup(0x15)).toBeUndefined();
    expect(cmap.lookup(0x16)).toEqual(0x00);
    expect(cmap.lookup(0x1b)).toEqual(0x05);
    expect(cmap.lookup(0x1c)).toBeUndefined();
  });

  it("decodes codespace ranges", async function () {
    // prettier-ignore
    const str = "1 begincodespacerange\n" +
                "<01> <02>\n" +
                "<00000003> <00000004>\n" +
                "endcodespacerange\n";
    const stream = new StringStream(str);
    const cmap = await CMapFactory.create({ encoding: stream });
    const c = {};
    cmap.readCharCode(String.fromCharCode(1), 0, c);
    expect(c.charcode).toEqual(1);
    expect(c.length).toEqual(1);
    cmap.readCharCode(String.fromCharCode(0, 0, 0, 3), 0, c);
    expect(c.charcode).toEqual(3);
    expect(c.length).toEqual(4);
  });

  it("decodes 4 byte codespace ranges", async function () {
    // prettier-ignore
    const str = "1 begincodespacerange\n" +
                "<8EA1A1A1> <8EA1FEFE>\n" +
                "endcodespacerange\n";
    const stream = new StringStream(str);
    const cmap = await CMapFactory.create({ encoding: stream });
    const c = {};
    cmap.readCharCode(String.fromCharCode(0x8e, 0xa1, 0xa1, 0xa1), 0, c);
    expect(c.charcode).toEqual(0x8ea1a1a1);
    expect(c.length).toEqual(4);
  });

  it("read usecmap", async function () {
    const str = "/Adobe-Japan1-1 usecmap\n";
    const stream = new StringStream(str);
    const cmap = await CMapFactory.create({
      encoding: stream,
      fetchBuiltInCMap,
      useCMap: null,
    });
    expect(cmap instanceof CMap).toEqual(true);
    expect(cmap.useCMap).not.toBeNull();
    expect(cmap.builtInCMap).toBeFalsy();
    expect(cmap.length).toEqual(0x20a7);
    expect(cmap.isIdentityCMap).toEqual(false);
  });

  it("parses cmapname", async function () {
    const str = "/CMapName /Identity-H def\n";
    const stream = new StringStream(str);
    const cmap = await CMapFactory.create({ encoding: stream });
    expect(cmap.name).toEqual("Identity-H");
  });

  it("parses wmode", async function () {
    const str = "/WMode 1 def\n";
    const stream = new StringStream(str);
    const cmap = await CMapFactory.create({ encoding: stream });
    expect(cmap.vertical).toEqual(true);
  });

  it("loads built in cmap", async function () {
    const cmap = await CMapFactory.create({
      encoding: Name.get("Adobe-Japan1-1"),
      fetchBuiltInCMap,
      useCMap: null,
    });
    expect(cmap instanceof CMap).toEqual(true);
    expect(cmap.useCMap).toBeNull();
    expect(cmap.builtInCMap).toBeTruthy();
    expect(cmap.length).toEqual(0x20a7);
    expect(cmap.isIdentityCMap).toEqual(false);
  });

  it("loads built in identity cmap", async function () {
    const cmap = await CMapFactory.create({
      encoding: Name.get("Identity-H"),
      fetchBuiltInCMap,
      useCMap: null,
    });
    expect(cmap instanceof IdentityCMap).toEqual(true);
    expect(cmap.vertical).toEqual(false);
    expect(cmap.length).toEqual(0x10000);
    expect(function () {
      return cmap.isIdentityCMap;
    }).toThrow(new Error("should not access .isIdentityCMap"));
  });

  it("attempts to load a non-existent built-in CMap", async function () {
    try {
      await CMapFactory.create({
        encoding: Name.get("null"),
        fetchBuiltInCMap,
        useCMap: null,
      });

      // Shouldn't get here.
      expect(false).toEqual(true);
    } catch (reason) {
      expect(reason instanceof Error).toEqual(true);
      expect(reason.message).toEqual("Unknown CMap name: null");
    }
  });

  it("attempts to load a built-in CMap without the necessary API parameters", async function () {
    function tmpFetchBuiltInCMap(name) {
      const CMapReaderFactory = new DefaultCMapReaderFactory({});
      return CMapReaderFactory.fetch({ name });
    }

    try {
      await CMapFactory.create({
        encoding: Name.get("Adobe-Japan1-1"),
        fetchBuiltInCMap: tmpFetchBuiltInCMap,
        useCMap: null,
      });

      // Shouldn't get here.
      expect(false).toEqual(true);
    } catch (reason) {
      expect(reason instanceof Error).toEqual(true);
      expect(reason.message).toEqual(
        "Ensure that the `cMapUrl` and `cMapPacked` API parameters are provided."
      );
    }
  });

  it("attempts to load a built-in CMap with inconsistent API parameters", async function () {
    function tmpFetchBuiltInCMap(name) {
      const CMapReaderFactory = new DefaultCMapReaderFactory({
        baseUrl: CMAP_URL,
        isCompressed: false,
      });
      return CMapReaderFactory.fetch({ name });
    }

    try {
      await CMapFactory.create({
        encoding: Name.get("Adobe-Japan1-1"),
        fetchBuiltInCMap: tmpFetchBuiltInCMap,
        useCMap: null,
      });

      // Shouldn't get here.
      expect(false).toEqual(true);
    } catch (reason) {
      expect(reason instanceof Error).toEqual(true);
      const message = reason.message;
      expect(message.startsWith("Unable to load CMap at: ")).toEqual(true);
      expect(message.endsWith("/external/bcmaps/Adobe-Japan1-1")).toEqual(true);
    }
  });
});
904
test/unit/colorspace_spec.js
Normal file
@@ -0,0 +1,904 @@
/* Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { Dict, Name, Ref } from "../../src/core/primitives.js";
import {
  GlobalColorSpaceCache,
  LocalColorSpaceCache,
} from "../../src/core/image_utils.js";
import { Stream, StringStream } from "../../src/core/stream.js";
import { ColorSpace } from "../../src/core/colorspace.js";
import { ColorSpaceUtils } from "../../src/core/colorspace_utils.js";
import { PDFFunctionFactory } from "../../src/core/function.js";
import { XRefMock } from "./test_utils.js";

describe("colorspace", function () {
  describe("ColorSpace.isDefaultDecode", function () {
    it("should be true if decode is not an array", function () {
      expect(ColorSpace.isDefaultDecode("string", 0)).toBeTruthy();
    });

    it("should be true if length of decode array is not correct", function () {
      expect(ColorSpace.isDefaultDecode([0], 1)).toBeTruthy();
      expect(ColorSpace.isDefaultDecode([0, 1, 0], 1)).toBeTruthy();
    });

    it("should be true if decode map matches the default decode map", function () {
      expect(ColorSpace.isDefaultDecode([], 0)).toBeTruthy();

      expect(ColorSpace.isDefaultDecode([0, 0], 1)).toBeFalsy();
      expect(ColorSpace.isDefaultDecode([0, 1], 1)).toBeTruthy();

      expect(ColorSpace.isDefaultDecode([0, 1, 0, 1, 0, 1], 3)).toBeTruthy();
      expect(ColorSpace.isDefaultDecode([0, 1, 0, 1, 1, 1], 3)).toBeFalsy();

      expect(
        ColorSpace.isDefaultDecode([0, 1, 0, 1, 0, 1, 0, 1], 4)
      ).toBeTruthy();
      expect(
        ColorSpace.isDefaultDecode([1, 0, 0, 1, 0, 1, 0, 1], 4)
      ).toBeFalsy();
    });
  });

  describe("ColorSpace caching", function () {
    let globalColorSpaceCache, localColorSpaceCache;

    beforeAll(function () {
      globalColorSpaceCache = new GlobalColorSpaceCache();
      localColorSpaceCache = new LocalColorSpaceCache();
    });

    afterAll(function () {
      globalColorSpaceCache = null;
      localColorSpaceCache = null;
    });

    it("caching by Name", function () {
      const xref = new XRefMock();
      const pdfFunctionFactory = new PDFFunctionFactory({
        xref,
      });

      const colorSpace1 = ColorSpaceUtils.parse({
        cs: Name.get("Pattern"),
        xref,
        resources: null,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache,
      });
      expect(colorSpace1.name).toEqual("Pattern");

      const colorSpace2 = ColorSpaceUtils.parse({
        cs: Name.get("Pattern"),
        xref,
        resources: null,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache,
      });
      expect(colorSpace2.name).toEqual("Pattern");

      const colorSpaceNonCached = ColorSpaceUtils.parse({
        cs: Name.get("Pattern"),
        xref,
        resources: null,
        pdfFunctionFactory,
        globalColorSpaceCache: new GlobalColorSpaceCache(),
        localColorSpaceCache: new LocalColorSpaceCache(),
      });
      expect(colorSpaceNonCached.name).toEqual("Pattern");

      const colorSpaceOther = ColorSpaceUtils.parse({
        cs: Name.get("RGB"),
        xref,
        resources: null,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache,
      });
      expect(colorSpaceOther.name).toEqual("DeviceRGB");

      // These two must be *identical* if caching worked as intended.
      expect(colorSpace1).toBe(colorSpace2);

      expect(colorSpace1).not.toBe(colorSpaceNonCached);
      expect(colorSpace1).not.toBe(colorSpaceOther);
    });

    it("caching by Ref", function () {
      const paramsCalGray = new Dict();
      paramsCalGray.set("WhitePoint", [1, 1, 1]);
      paramsCalGray.set("BlackPoint", [0, 0, 0]);
      paramsCalGray.set("Gamma", 2.0);

      const paramsCalRGB = new Dict();
      paramsCalRGB.set("WhitePoint", [1, 1, 1]);
      paramsCalRGB.set("BlackPoint", [0, 0, 0]);
      paramsCalRGB.set("Gamma", [1, 1, 1]);
      paramsCalRGB.set("Matrix", [1, 0, 0, 0, 1, 0, 0, 0, 1]);

      const xref = new XRefMock([
        {
          ref: Ref.get(50, 0),
          data: [Name.get("CalGray"), paramsCalGray],
        },
        {
          ref: Ref.get(100, 0),
          data: [Name.get("CalRGB"), paramsCalRGB],
        },
      ]);
      const pdfFunctionFactory = new PDFFunctionFactory({
        xref,
      });

      const colorSpace1 = ColorSpaceUtils.parse({
        cs: Ref.get(50, 0),
        xref,
        resources: null,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache,
      });
      expect(colorSpace1.name).toEqual("CalGray");

      const colorSpace2 = ColorSpaceUtils.parse({
        cs: Ref.get(50, 0),
        xref,
        resources: null,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache,
      });
      expect(colorSpace2.name).toEqual("CalGray");

      const colorSpaceNonCached = ColorSpaceUtils.parse({
        cs: Ref.get(50, 0),
        xref,
        resources: null,
        pdfFunctionFactory,
        globalColorSpaceCache: new GlobalColorSpaceCache(),
        localColorSpaceCache: new LocalColorSpaceCache(),
      });
      expect(colorSpaceNonCached.name).toEqual("CalGray");

      const colorSpaceOther = ColorSpaceUtils.parse({
        cs: Ref.get(100, 0),
        xref,
        resources: null,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache,
      });
      expect(colorSpaceOther.name).toEqual("CalRGB");

      // These two must be *identical* if caching worked as intended.
      expect(colorSpace1).toBe(colorSpace2);

      expect(colorSpace1).not.toBe(colorSpaceNonCached);
      expect(colorSpace1).not.toBe(colorSpaceOther);
    });
  });

  describe("DeviceGrayCS", function () {
    let globalColorSpaceCache;

    beforeAll(function () {
      globalColorSpaceCache = new GlobalColorSpaceCache();
    });

    afterAll(function () {
      globalColorSpaceCache = null;
    });

    it("should handle the case when cs is a Name object", function () {
      const cs = Name.get("DeviceGray");
      const xref = new XRefMock([
        {
          ref: Ref.get(10, 0),
          data: new Dict(),
        },
      ]);
      const resources = new Dict();

      const pdfFunctionFactory = new PDFFunctionFactory({
        xref,
      });
      const colorSpace = ColorSpaceUtils.parse({
        cs,
        xref,
        resources,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache: new LocalColorSpaceCache(),
      });

      const testSrc = new Uint8Array([27, 125, 250, 131]);
      const testDest = new Uint8ClampedArray(4 * 4 * 3);
      // prettier-ignore
      const expectedDest = new Uint8ClampedArray([
        27, 27, 27,
        27, 27, 27,
        125, 125, 125,
        125, 125, 125,
        27, 27, 27,
        27, 27, 27,
        125, 125, 125,
        125, 125, 125,
        250, 250, 250,
        250, 250, 250,
        131, 131, 131,
        131, 131, 131,
        250, 250, 250,
        250, 250, 250,
        131, 131, 131,
        131, 131, 131
      ]);
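      // The fillRgb arguments are, roughly, (dest, originalWidth,
      // originalHeight, width, height, actualHeight, bpc, comps, alpha01);
      // here a 2x2 grayscale source is expanded into a 4x4 RGB destination.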
      colorSpace.fillRgb(testDest, 2, 2, 4, 4, 4, 8, testSrc, 0);

      expect(colorSpace.getRgb(new Float32Array([0.1]), 0)).toEqual(
        new Uint8ClampedArray([26, 26, 26])
      );
      expect(colorSpace.getOutputLength(2, 0)).toEqual(6);
      expect(colorSpace.isPassthrough(8)).toBeFalsy();
      expect(testDest).toEqual(expectedDest);
    });
    it("should handle the case when cs is an indirect object", function () {
      const cs = Ref.get(10, 0);
      const xref = new XRefMock([
        {
          ref: cs,
          data: Name.get("DeviceGray"),
        },
      ]);
      const resources = new Dict();

      const pdfFunctionFactory = new PDFFunctionFactory({
        xref,
      });
      const colorSpace = ColorSpaceUtils.parse({
        cs,
        xref,
        resources,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache: new LocalColorSpaceCache(),
      });

      const testSrc = new Uint8Array([27, 125, 250, 131]);
      const testDest = new Uint8ClampedArray(3 * 3 * 3);
      // prettier-ignore
      const expectedDest = new Uint8ClampedArray([
        27, 27, 27,
        27, 27, 27,
        125, 125, 125,
        27, 27, 27,
        27, 27, 27,
        125, 125, 125,
        250, 250, 250,
        250, 250, 250,
        131, 131, 131
      ]);
      colorSpace.fillRgb(testDest, 2, 2, 3, 3, 3, 8, testSrc, 0);

      expect(colorSpace.getRgb(new Float32Array([0.2]), 0)).toEqual(
        new Uint8ClampedArray([51, 51, 51])
      );
      expect(colorSpace.getOutputLength(3, 1)).toEqual(12);
      expect(colorSpace.isPassthrough(8)).toBeFalsy();
      expect(testDest).toEqual(expectedDest);
    });
  });

  describe("DeviceRgbCS", function () {
    let globalColorSpaceCache;

    beforeAll(function () {
      globalColorSpaceCache = new GlobalColorSpaceCache();
    });

    afterAll(function () {
      globalColorSpaceCache = null;
    });

    it("should handle the case when cs is a Name object", function () {
      const cs = Name.get("DeviceRGB");
      const xref = new XRefMock([
        {
          ref: Ref.get(10, 0),
          data: new Dict(),
        },
      ]);
      const resources = new Dict();

      const pdfFunctionFactory = new PDFFunctionFactory({
        xref,
      });
      const colorSpace = ColorSpaceUtils.parse({
        cs,
        xref,
        resources,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache: new LocalColorSpaceCache(),
      });

      // prettier-ignore
      const testSrc = new Uint8Array([
        27, 125, 250,
        131, 139, 140,
        111, 25, 198,
        21, 147, 255
      ]);
      const testDest = new Uint8ClampedArray(4 * 4 * 3);
      // prettier-ignore
      const expectedDest = new Uint8ClampedArray([
        27, 125, 250,
        27, 125, 250,
        131, 139, 140,
        131, 139, 140,
        27, 125, 250,
        27, 125, 250,
        131, 139, 140,
        131, 139, 140,
        111, 25, 198,
        111, 25, 198,
        21, 147, 255,
        21, 147, 255,
        111, 25, 198,
        111, 25, 198,
        21, 147, 255,
        21, 147, 255
      ]);
      colorSpace.fillRgb(testDest, 2, 2, 4, 4, 4, 8, testSrc, 0);

      expect(colorSpace.getRgb(new Float32Array([0.1, 0.2, 0.3]), 0)).toEqual(
        new Uint8ClampedArray([26, 51, 77])
      );
      expect(colorSpace.getOutputLength(4, 0)).toEqual(4);
      expect(colorSpace.isPassthrough(8)).toBeTruthy();
      expect(testDest).toEqual(expectedDest);
    });
    it("should handle the case when cs is an indirect object", function () {
      const cs = Ref.get(10, 0);
      const xref = new XRefMock([
        {
          ref: cs,
          data: Name.get("DeviceRGB"),
        },
      ]);
      const resources = new Dict();

      const pdfFunctionFactory = new PDFFunctionFactory({
        xref,
      });
      const colorSpace = ColorSpaceUtils.parse({
        cs,
        xref,
        resources,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache: new LocalColorSpaceCache(),
      });

      // prettier-ignore
      const testSrc = new Uint8Array([
        27, 125, 250,
        131, 139, 140,
        111, 25, 198,
        21, 147, 255
      ]);
      const testDest = new Uint8ClampedArray(3 * 3 * 3);
      // prettier-ignore
      const expectedDest = new Uint8ClampedArray([
        27, 125, 250,
        27, 125, 250,
        131, 139, 140,
        27, 125, 250,
        27, 125, 250,
        131, 139, 140,
        111, 25, 198,
        111, 25, 198,
        21, 147, 255
      ]);
      colorSpace.fillRgb(testDest, 2, 2, 3, 3, 3, 8, testSrc, 0);

      expect(colorSpace.getRgb(new Float32Array([0.1, 0.2, 0.3]), 0)).toEqual(
        new Uint8ClampedArray([26, 51, 77])
      );
      expect(colorSpace.getOutputLength(4, 1)).toEqual(5);
      expect(colorSpace.isPassthrough(8)).toBeTruthy();
      expect(testDest).toEqual(expectedDest);
    });
  });

  describe("DeviceCmykCS", function () {
    let globalColorSpaceCache;

    beforeAll(function () {
      globalColorSpaceCache = new GlobalColorSpaceCache();
    });

    afterAll(function () {
      globalColorSpaceCache = null;
    });

    it("should handle the case when cs is a Name object", function () {
      const cs = Name.get("DeviceCMYK");
      const xref = new XRefMock([
        {
          ref: Ref.get(10, 0),
          data: new Dict(),
        },
      ]);
      const resources = new Dict();

      const pdfFunctionFactory = new PDFFunctionFactory({
        xref,
      });
      const colorSpace = ColorSpaceUtils.parse({
        cs,
        xref,
        resources,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache: new LocalColorSpaceCache(),
      });

      // prettier-ignore
      const testSrc = new Uint8Array([
        27, 125, 250, 128,
        131, 139, 140, 45,
        111, 25, 198, 78,
        21, 147, 255, 69
      ]);
      const testDest = new Uint8ClampedArray(4 * 4 * 3);
      // prettier-ignore
      const expectedDest = new Uint8ClampedArray([
        135, 81, 18,
        135, 81, 18,
        114, 102, 97,
        114, 102, 97,
        135, 81, 18,
        135, 81, 18,
        114, 102, 97,
        114, 102, 97,
        112, 144, 75,
        112, 144, 75,
        188, 98, 27,
        188, 98, 27,
        112, 144, 75,
        112, 144, 75,
        188, 98, 27,
        188, 98, 27
      ]);
      colorSpace.fillRgb(testDest, 2, 2, 4, 4, 4, 8, testSrc, 0);

      expect(
        colorSpace.getRgb(new Float32Array([0.1, 0.2, 0.3, 1]), 0)
      ).toEqual(new Uint8ClampedArray([32, 28, 21]));
      expect(colorSpace.getOutputLength(4, 0)).toEqual(3);
      expect(colorSpace.isPassthrough(8)).toBeFalsy();
      expect(testDest).toEqual(expectedDest);
    });
    it("should handle the case when cs is an indirect object", function () {
      const cs = Ref.get(10, 0);
      const xref = new XRefMock([
        {
          ref: cs,
          data: Name.get("DeviceCMYK"),
        },
      ]);
      const resources = new Dict();

      const pdfFunctionFactory = new PDFFunctionFactory({
        xref,
      });
      const colorSpace = ColorSpaceUtils.parse({
        cs,
        xref,
        resources,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache: new LocalColorSpaceCache(),
      });

      // prettier-ignore
      const testSrc = new Uint8Array([
        27, 125, 250, 128,
        131, 139, 140, 45,
        111, 25, 198, 78,
        21, 147, 255, 69
      ]);
      const testDest = new Uint8ClampedArray(3 * 3 * 3);
      // prettier-ignore
      const expectedDest = new Uint8ClampedArray([
        135, 81, 18,
        135, 81, 18,
        114, 102, 97,
        135, 81, 18,
        135, 81, 18,
        114, 102, 97,
        112, 144, 75,
        112, 144, 75,
        188, 98, 27
      ]);
      colorSpace.fillRgb(testDest, 2, 2, 3, 3, 3, 8, testSrc, 0);

      expect(
        colorSpace.getRgb(new Float32Array([0.1, 0.2, 0.3, 1]), 0)
      ).toEqual(new Uint8ClampedArray([32, 28, 21]));
      expect(colorSpace.getOutputLength(4, 1)).toEqual(4);
      expect(colorSpace.isPassthrough(8)).toBeFalsy();
      expect(testDest).toEqual(expectedDest);
    });
  });

  describe("CalGrayCS", function () {
    let globalColorSpaceCache;

    beforeAll(function () {
      globalColorSpaceCache = new GlobalColorSpaceCache();
    });

    afterAll(function () {
      globalColorSpaceCache = null;
    });

    it("should handle the case when cs is an array", function () {
      const params = new Dict();
      params.set("WhitePoint", [1, 1, 1]);
      params.set("BlackPoint", [0, 0, 0]);
      params.set("Gamma", 2.0);

      const cs = [Name.get("CalGray"), params];
      const xref = new XRefMock([
        {
          ref: Ref.get(10, 0),
          data: new Dict(),
        },
      ]);
      const resources = new Dict();

      const pdfFunctionFactory = new PDFFunctionFactory({
        xref,
      });
      const colorSpace = ColorSpaceUtils.parse({
        cs,
        xref,
        resources,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache: new LocalColorSpaceCache(),
      });

      const testSrc = new Uint8Array([27, 125, 250, 131]);
      const testDest = new Uint8ClampedArray(4 * 4 * 3);
      // prettier-ignore
      const expectedDest = new Uint8ClampedArray([
        25, 25, 25,
        25, 25, 25,
        143, 143, 143,
        143, 143, 143,
        25, 25, 25,
        25, 25, 25,
        143, 143, 143,
        143, 143, 143,
        251, 251, 251,
        251, 251, 251,
        149, 149, 149,
        149, 149, 149,
        251, 251, 251,
        251, 251, 251,
        149, 149, 149,
        149, 149, 149
      ]);
      colorSpace.fillRgb(testDest, 2, 2, 4, 4, 4, 8, testSrc, 0);

      expect(colorSpace.getRgb(new Float32Array([1.0]), 0)).toEqual(
        new Uint8ClampedArray([255, 255, 255])
      );
      expect(colorSpace.getOutputLength(4, 0)).toEqual(12);
      expect(colorSpace.isPassthrough(8)).toBeFalsy();
      expect(testDest).toEqual(expectedDest);
    });
  });

  describe("CalRGBCS", function () {
    let globalColorSpaceCache;

    beforeAll(function () {
      globalColorSpaceCache = new GlobalColorSpaceCache();
    });

    afterAll(function () {
      globalColorSpaceCache = null;
    });

    it("should handle the case when cs is an array", function () {
      const params = new Dict();
      params.set("WhitePoint", [1, 1, 1]);
      params.set("BlackPoint", [0, 0, 0]);
      params.set("Gamma", [1, 1, 1]);
      params.set("Matrix", [1, 0, 0, 0, 1, 0, 0, 0, 1]);

      const cs = [Name.get("CalRGB"), params];
      const xref = new XRefMock([
        {
          ref: Ref.get(10, 0),
          data: new Dict(),
        },
      ]);
      const resources = new Dict();

      const pdfFunctionFactory = new PDFFunctionFactory({
        xref,
      });
      const colorSpace = ColorSpaceUtils.parse({
        cs,
        xref,
        resources,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache: new LocalColorSpaceCache(),
      });

      // prettier-ignore
      const testSrc = new Uint8Array([
        27, 125, 250,
        131, 139, 140,
        111, 25, 198,
        21, 147, 255
      ]);
      const testDest = new Uint8ClampedArray(3 * 3 * 3);
      // prettier-ignore
      const expectedDest = new Uint8ClampedArray([
        0, 238, 255,
        0, 238, 255,
        185, 196, 195,
        0, 238, 255,
        0, 238, 255,
        185, 196, 195,
        235, 0, 243,
        235, 0, 243,
        0, 255, 255
      ]);
      colorSpace.fillRgb(testDest, 2, 2, 3, 3, 3, 8, testSrc, 0);

      expect(colorSpace.getRgb(new Float32Array([0.1, 0.2, 0.3]), 0)).toEqual(
        new Uint8ClampedArray([0, 147, 151])
      );
      expect(colorSpace.getOutputLength(4, 0)).toEqual(4);
      expect(colorSpace.isPassthrough(8)).toBeFalsy();
      expect(testDest).toEqual(expectedDest);
    });
  });

  describe("LabCS", function () {
    let globalColorSpaceCache;

    beforeAll(function () {
      globalColorSpaceCache = new GlobalColorSpaceCache();
    });

    afterAll(function () {
      globalColorSpaceCache = null;
    });

    it("should handle the case when cs is an array", function () {
      const params = new Dict();
      params.set("WhitePoint", [1, 1, 1]);
      params.set("BlackPoint", [0, 0, 0]);
      params.set("Range", [-100, 100, -100, 100]);

      const cs = [Name.get("Lab"), params];
      const xref = new XRefMock([
        {
          ref: Ref.get(10, 0),
          data: new Dict(),
        },
      ]);
      const resources = new Dict();

      const pdfFunctionFactory = new PDFFunctionFactory({
        xref,
      });
      const colorSpace = ColorSpaceUtils.parse({
        cs,
        xref,
        resources,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache: new LocalColorSpaceCache(),
      });

      // prettier-ignore
      const testSrc = new Uint8Array([
        27, 25, 50,
        31, 19, 40,
        11, 25, 98,
        21, 47, 55
      ]);
      const testDest = new Uint8ClampedArray(3 * 3 * 3);
      // prettier-ignore
      const expectedDest = new Uint8ClampedArray([
        0, 49, 101,
        0, 49, 101,
        0, 53, 117,
        0, 49, 101,
        0, 49, 101,
        0, 53, 117,
        0, 41, 40,
        0, 41, 40,
        0, 43, 90
      ]);
      colorSpace.fillRgb(testDest, 2, 2, 3, 3, 3, 8, testSrc, 0);

      expect(colorSpace.getRgb([55, 25, 35], 0)).toEqual(
        new Uint8ClampedArray([188, 100, 61])
      );
      expect(colorSpace.getOutputLength(4, 0)).toEqual(4);
      expect(colorSpace.isPassthrough(8)).toBeFalsy();
      expect(colorSpace.isDefaultDecode([0, 1])).toBeTruthy();
      expect(testDest).toEqual(expectedDest);
    });
  });

  describe("IndexedCS", function () {
    let globalColorSpaceCache;

    beforeAll(function () {
      globalColorSpaceCache = new GlobalColorSpaceCache();
    });

    afterAll(function () {
      globalColorSpaceCache = null;
    });

    it("should handle the case when cs is an array", function () {
      // prettier-ignore
      const lookup = new Stream(
        new Uint8Array([
          23, 155, 35,
          147, 69, 93,
          255, 109, 70
        ])
      );
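      // An Indexed colorspace has the form [/Indexed base hival lookup]; with
      // hival = 2, the lookup stream above supplies three base-RGB entries
      // that the component values 0..2 below index into.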
const cs = [Name.get("Indexed"), Name.get("DeviceRGB"), 2, lookup];
|
||||
const xref = new XRefMock([
|
||||
{
|
||||
ref: Ref.get(10, 0),
|
||||
data: new Dict(),
|
||||
},
|
||||
]);
|
||||
const resources = new Dict();
|
||||
|
||||
const pdfFunctionFactory = new PDFFunctionFactory({
|
||||
xref,
|
||||
});
|
||||
const colorSpace = ColorSpaceUtils.parse({
|
||||
cs,
|
||||
xref,
|
||||
resources,
|
||||
pdfFunctionFactory,
|
||||
globalColorSpaceCache,
|
||||
localColorSpaceCache: new LocalColorSpaceCache(),
|
||||
});
|
||||
|
||||
const testSrc = new Uint8Array([2, 2, 0, 1]);
|
||||
const testDest = new Uint8ClampedArray(3 * 3 * 3);
|
||||
// prettier-ignore
|
||||
const expectedDest = new Uint8ClampedArray([
|
||||
255, 109, 70,
|
||||
255, 109, 70,
|
||||
255, 109, 70,
|
||||
255, 109, 70,
|
||||
255, 109, 70,
|
||||
255, 109, 70,
|
||||
23, 155, 35,
|
||||
23, 155, 35,
|
||||
147, 69, 93,
|
||||
]);
|
||||
colorSpace.fillRgb(testDest, 2, 2, 3, 3, 3, 8, testSrc, 0);
|
||||
|
||||
expect(colorSpace.getRgb([2], 0)).toEqual(
|
||||
new Uint8ClampedArray([255, 109, 70])
|
||||
);
|
||||
expect(colorSpace.isPassthrough(8)).toBeFalsy();
|
||||
expect(colorSpace.isDefaultDecode([0, 1], 1)).toBeTruthy();
|
||||
expect(testDest).toEqual(expectedDest);
|
||||
});
|
||||
});
|
||||
|
||||
describe("AlternateCS", function () {
|
||||
let globalColorSpaceCache;
|
||||
|
||||
beforeAll(function () {
|
||||
globalColorSpaceCache = new GlobalColorSpaceCache();
|
||||
});
|
||||
|
||||
afterAll(function () {
|
||||
globalColorSpaceCache = null;
|
||||
});
|
||||
|
||||
it("should handle the case when cs is an array", function () {
|
||||
const fnDict = new Dict();
|
||||
fnDict.set("FunctionType", 4);
|
||||
fnDict.set("Domain", [0.0, 1.0]);
|
||||
fnDict.set("Range", [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0]);
|
||||
fnDict.set("Length", 58);
|
||||
|
||||
let fn = new StringStream(
|
||||
"{ dup 0.84 mul " +
|
||||
"exch 0.00 exch " +
|
||||
"dup 0.44 mul " +
|
||||
"exch 0.21 mul }"
|
||||
);
|
||||
fn = new Stream(fn.bytes, 0, 58, fnDict);
|
||||
|
||||
const fnRef = Ref.get(10, 0);
|
||||
|
||||
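      // A Separation colorspace has the form
      // [/Separation name alternateSpace tintTransform]; the PostScript
      // function above maps the single tint component to DeviceCMYK values.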
      const cs = [
        Name.get("Separation"),
        Name.get("LogoGreen"),
        Name.get("DeviceCMYK"),
        fnRef,
      ];
      const xref = new XRefMock([
        {
          ref: fnRef,
          data: fn,
        },
      ]);
      const resources = new Dict();

      const pdfFunctionFactory = new PDFFunctionFactory({
        xref,
      });
      const colorSpace = ColorSpaceUtils.parse({
        cs,
        xref,
        resources,
        pdfFunctionFactory,
        globalColorSpaceCache,
        localColorSpaceCache: new LocalColorSpaceCache(),
      });

      const testSrc = new Uint8Array([27, 25, 50, 31]);
      const testDest = new Uint8ClampedArray(3 * 3 * 3);
      // prettier-ignore
      const expectedDest = new Uint8ClampedArray([
        226, 242, 241,
        226, 242, 241,
        229, 244, 242,
        226, 242, 241,
        226, 242, 241,
        229, 244, 242,
        203, 232, 229,
        203, 232, 229,
        222, 241, 238
      ]);
      colorSpace.fillRgb(testDest, 2, 2, 3, 3, 3, 8, testSrc, 0);

      expect(colorSpace.getRgb([0.1], 0)).toEqual(
        new Uint8ClampedArray([228, 243, 242])
      );
      expect(colorSpace.isPassthrough(8)).toBeFalsy();
      expect(colorSpace.isDefaultDecode([0, 1])).toBeTruthy();
      expect(testDest).toEqual(expectedDest);
    });
  });
});
90
test/unit/common_pdfstream_tests.js
Normal file
@@ -0,0 +1,90 @@
/* Copyright 2024 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { AbortException, isNodeJS } from "../../src/shared/util.js";
import { getCrossOriginHostname, TestPdfsServer } from "./test_utils.js";

// Common tests to verify behavior across implementations of the IPDFStream
// interface:
// - PDFNetworkStream by network_spec.js
// - PDFFetchStream by fetch_stream_spec.js
async function testCrossOriginRedirects({
  PDFStreamClass,
  redirectIfRange,
  testRangeReader,
}) {
  const basicApiUrl = TestPdfsServer.resolveURL("basicapi.pdf").href;
  const basicApiFileLength = 105779;

  const rangeSize = 32768;
  const stream = new PDFStreamClass({
    url: getCrossOriginUrlWithRedirects(basicApiUrl, redirectIfRange),
    length: basicApiFileLength,
    rangeChunkSize: rangeSize,
    disableStream: true,
    disableRange: false,
  });

  const fullReader = stream.getFullReader();

  await fullReader.headersReady;
  // Sanity check: We can only test range requests if supported:
  expect(fullReader.isRangeSupported).toEqual(true);
  // ^ When range requests are supported (and streaming is disabled), the full
  // initial request is aborted and we do not need to call fullReader.cancel().

  const rangeReader = stream.getRangeReader(
    basicApiFileLength - rangeSize,
    basicApiFileLength
  );

  try {
    await testRangeReader(rangeReader);
  } finally {
    rangeReader.cancel(new AbortException("Don't need rangeReader"));
  }
}

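// A spec file is expected to exercise this helper roughly as follows (an
// illustrative sketch only; the concrete stream class and the assertions
// inside testRangeReader are assumptions, not the exact code of the
// network/fetch specs):
//
//   it("redirects range requests cross-origin", async function () {
//     await testCrossOriginRedirects({
//       PDFStreamClass: PDFFetchStream,
//       redirectIfRange: true,
//       async testRangeReader(rangeReader) {
//         await rangeReader.read();
//       },
//     });
//   });
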
/**
 * @param {string} testserverUrl - A URL, handled by the test server, that
 * supports CORS and redirects (see crossOriginHandler and redirectHandler
 * in webserver.mjs).
 * @param {boolean} redirectIfRange - Whether Range requests should be
 * redirected to a different origin compared to the initial request.
 * @returns {string} A URL that will be redirected by the server.
 */
function getCrossOriginUrlWithRedirects(testserverUrl, redirectIfRange) {
  const url = new URL(testserverUrl);
  if (!isNodeJS) {
    // The responses are going to be cross-origin. In Node.js, fetch() allows
    // cross-origin requests for any request, but in browser environments we
    // need to enable CORS.
    // This option depends on crossOriginHandler in webserver.mjs.
    url.searchParams.set("cors", "withoutCredentials");
  }

  // These redirect options depend on redirectHandler in webserver.mjs.

  // We will change the host to a cross-origin domain so that the initial
  // request will be cross-origin. Set "redirectToHost" to the original host
  // to force a cross-origin redirect (relative to the initial URL).
  url.searchParams.set("redirectToHost", url.hostname);
  url.hostname = getCrossOriginHostname(url.hostname);
  if (redirectIfRange) {
    url.searchParams.set("redirectIfRange", "1");
  }
  return url.href;
}

export { testCrossOriginRedirects };
493
test/unit/core_utils_spec.js
Normal file
@@ -0,0 +1,493 @@
/* Copyright 2019 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {
  arrayBuffersToBytes,
  encodeToXmlString,
  escapePDFName,
  escapeString,
  getInheritableProperty,
  getSizeInBytes,
  isAscii,
  isWhiteSpace,
  log2,
  numberToString,
  parseXFAPath,
  recoverJsURL,
  stringToUTF16HexString,
  stringToUTF16String,
  toRomanNumerals,
  validateCSSFont,
} from "../../src/core/core_utils.js";
import { Dict, Ref } from "../../src/core/primitives.js";
import { XRefMock } from "./test_utils.js";

describe("core_utils", function () {
  describe("arrayBuffersToBytes", function () {
    it("handles zero ArrayBuffers", function () {
      const bytes = arrayBuffersToBytes([]);

      expect(bytes).toEqual(new Uint8Array(0));
    });

    it("handles one ArrayBuffer", function () {
      const buffer = new Uint8Array([1, 2, 3]).buffer;
      const bytes = arrayBuffersToBytes([buffer]);

      expect(bytes).toEqual(new Uint8Array([1, 2, 3]));
      // Ensure that the fast-path works correctly.
      expect(bytes.buffer).toBe(buffer);
    });

    it("handles multiple ArrayBuffers", function () {
      const buffer1 = new Uint8Array([1, 2, 3]).buffer,
        buffer2 = new Uint8Array(0).buffer,
        buffer3 = new Uint8Array([4, 5]).buffer;
      const bytes = arrayBuffersToBytes([buffer1, buffer2, buffer3]);

      expect(bytes).toEqual(new Uint8Array([1, 2, 3, 4, 5]));
    });
  });

  describe("getInheritableProperty", function () {
    it("handles non-dictionary arguments", function () {
      expect(getInheritableProperty({ dict: null, key: "foo" })).toEqual(
        undefined
      );
      expect(getInheritableProperty({ dict: undefined, key: "foo" })).toEqual(
        undefined
      );
    });

    it("handles dictionaries that do not contain the property", function () {
      // Empty dictionary.
      const emptyDict = new Dict();
      expect(getInheritableProperty({ dict: emptyDict, key: "foo" })).toEqual(
        undefined
      );

      // Filled dictionary with a different property.
      const filledDict = new Dict();
      filledDict.set("bar", "baz");
      expect(getInheritableProperty({ dict: filledDict, key: "foo" })).toEqual(
        undefined
      );
    });

    it("fetches the property if it is not inherited", function () {
      const ref = Ref.get(10, 0);
      const xref = new XRefMock([{ ref, data: "quux" }]);
      const dict = new Dict(xref);

      // Regular values should be fetched.
      dict.set("foo", "bar");
      expect(getInheritableProperty({ dict, key: "foo" })).toEqual("bar");

      // Array value should be fetched (with references resolved).
      dict.set("baz", ["qux", ref]);
      expect(
        getInheritableProperty({ dict, key: "baz", getArray: true })
      ).toEqual(["qux", "quux"]);
    });

    it("fetches the property if it is inherited and present on one level", function () {
      const ref = Ref.get(10, 0);
      const xref = new XRefMock([{ ref, data: "quux" }]);
      const firstDict = new Dict(xref);
      const secondDict = new Dict(xref);
      firstDict.set("Parent", secondDict);

      // Regular values should be fetched.
      secondDict.set("foo", "bar");
      expect(getInheritableProperty({ dict: firstDict, key: "foo" })).toEqual(
        "bar"
      );

      // Array value should be fetched (with references resolved).
      secondDict.set("baz", ["qux", ref]);
      expect(
        getInheritableProperty({ dict: firstDict, key: "baz", getArray: true })
      ).toEqual(["qux", "quux"]);
    });

    it("fetches the property if it is inherited and present on multiple levels", function () {
      const ref = Ref.get(10, 0);
      const xref = new XRefMock([{ ref, data: "quux" }]);
      const firstDict = new Dict(xref);
      const secondDict = new Dict(xref);
      firstDict.set("Parent", secondDict);

      // Regular values should be fetched.
      firstDict.set("foo", "bar1");
      secondDict.set("foo", "bar2");
      expect(getInheritableProperty({ dict: firstDict, key: "foo" })).toEqual(
        "bar1"
      );
      expect(
        getInheritableProperty({
          dict: firstDict,
          key: "foo",
          getArray: false,
          stopWhenFound: false,
        })
      ).toEqual(["bar1", "bar2"]);

      // Array value should be fetched (with references resolved).
      firstDict.set("baz", ["qux1", ref]);
      secondDict.set("baz", ["qux2", ref]);
      expect(
        getInheritableProperty({
          dict: firstDict,
          key: "baz",
          getArray: true,
          stopWhenFound: false,
        })
      ).toEqual([
        ["qux1", "quux"],
        ["qux2", "quux"],
      ]);
    });
  });

  describe("toRomanNumerals", function () {
    it("handles invalid arguments", function () {
      for (const input of ["foo", -1, 0]) {
        expect(function () {
          toRomanNumerals(input);
        }).toThrow(new Error("The number should be a positive integer."));
      }
    });

    it("converts numbers to uppercase Roman numerals", function () {
      expect(toRomanNumerals(1)).toEqual("I");
      expect(toRomanNumerals(6)).toEqual("VI");
      expect(toRomanNumerals(7)).toEqual("VII");
      expect(toRomanNumerals(8)).toEqual("VIII");
      expect(toRomanNumerals(10)).toEqual("X");
      expect(toRomanNumerals(40)).toEqual("XL");
      expect(toRomanNumerals(100)).toEqual("C");
      expect(toRomanNumerals(500)).toEqual("D");
      expect(toRomanNumerals(1000)).toEqual("M");
      expect(toRomanNumerals(2019)).toEqual("MMXIX");
    });

    it("converts numbers to lowercase Roman numerals", function () {
      expect(toRomanNumerals(1, /* lowercase = */ true)).toEqual("i");
      expect(toRomanNumerals(6, /* lowercase = */ true)).toEqual("vi");
      expect(toRomanNumerals(7, /* lowercase = */ true)).toEqual("vii");
      expect(toRomanNumerals(8, /* lowercase = */ true)).toEqual("viii");
      expect(toRomanNumerals(10, /* lowercase = */ true)).toEqual("x");
      expect(toRomanNumerals(40, /* lowercase = */ true)).toEqual("xl");
      expect(toRomanNumerals(100, /* lowercase = */ true)).toEqual("c");
      expect(toRomanNumerals(500, /* lowercase = */ true)).toEqual("d");
      expect(toRomanNumerals(1000, /* lowercase = */ true)).toEqual("m");
      expect(toRomanNumerals(2019, /* lowercase = */ true)).toEqual("mmxix");
    });
  });

  describe("log2", function () {
    it("handles values smaller than/equal to zero", function () {
      expect(log2(0)).toEqual(0);
      expect(log2(-1)).toEqual(0);
    });

    it("handles values larger than zero", function () {
      expect(log2(1)).toEqual(0);
      expect(log2(2)).toEqual(1);
      expect(log2(3)).toEqual(2);
      expect(log2(3.14)).toEqual(2);
    });
  });

  describe("numberToString", function () {
    it("should stringify integers", function () {
      expect(numberToString(1)).toEqual("1");
      expect(numberToString(0)).toEqual("0");
      expect(numberToString(-1)).toEqual("-1");
    });

    it("should stringify floats", function () {
      expect(numberToString(1.0)).toEqual("1");
      expect(numberToString(1.2)).toEqual("1.2");
      expect(numberToString(1.23)).toEqual("1.23");
      expect(numberToString(1.234)).toEqual("1.23");
    });
  });

  describe("isWhiteSpace", function () {
    it("handles space characters", function () {
      expect(isWhiteSpace(0x20)).toEqual(true);
      expect(isWhiteSpace(0x09)).toEqual(true);
      expect(isWhiteSpace(0x0d)).toEqual(true);
      expect(isWhiteSpace(0x0a)).toEqual(true);
    });

    it("handles non-space characters", function () {
      expect(isWhiteSpace(0x0b)).toEqual(false);
      expect(isWhiteSpace(null)).toEqual(false);
      expect(isWhiteSpace(undefined)).toEqual(false);
    });
  });

  describe("parseXFAPath", function () {
    it("should get a correctly parsed path", function () {
      const path = "foo.bar[12].oof[3].rab.FOO[123].BAR[456]";
      expect(parseXFAPath(path)).toEqual([
        { name: "foo", pos: 0 },
        { name: "bar", pos: 12 },
        { name: "oof", pos: 3 },
        { name: "rab", pos: 0 },
        { name: "FOO", pos: 123 },
        { name: "BAR", pos: 456 },
      ]);
    });
  });

  describe("recoverJsURL", function () {
    it("should get valid URLs without `newWindow` property", function () {
      const inputs = [
        "window.open('https://test.local')",
        "window.open('https://test.local', true)",
        "app.launchURL('https://test.local')",
        "app.launchURL('https://test.local', false)",
        "xfa.host.gotoURL('https://test.local')",
        "xfa.host.gotoURL('https://test.local', true)",
      ];

      for (const input of inputs) {
        expect(recoverJsURL(input)).toEqual({
          url: "https://test.local",
          newWindow: false,
        });
      }
    });

    it("should get valid URLs with `newWindow` property", function () {
      const input = "app.launchURL('https://test.local', true)";
      expect(recoverJsURL(input)).toEqual({
        url: "https://test.local",
        newWindow: true,
      });
    });

    it("should not get invalid URLs", function () {
      const input = "navigateToUrl('https://test.local')";
      expect(recoverJsURL(input)).toBeNull();
    });
  });

  describe("escapePDFName", function () {
    it("should escape PDF name", function () {
      expect(escapePDFName("hello")).toEqual("hello");
      expect(escapePDFName("\xfehello")).toEqual("#fehello");
      expect(escapePDFName("he\xfell\xffo")).toEqual("he#fell#ffo");
      expect(escapePDFName("\xfehe\xfell\xffo\xff")).toEqual(
        "#fehe#fell#ffo#ff"
      );
      expect(escapePDFName("#h#e#l#l#o")).toEqual("#23h#23e#23l#23l#23o");
      expect(escapePDFName("#()<>[]{}/%")).toEqual(
        "#23#28#29#3c#3e#5b#5d#7b#7d#2f#25"
      );
    });
  });

  describe("escapeString", function () {
    it("should escape (, ), \\n, \\r, and \\", function () {
      expect(escapeString("((a\\a))\n(b(b\\b)\rb)")).toEqual(
        "\\(\\(a\\\\a\\)\\)\\n\\(b\\(b\\\\b\\)\\rb\\)"
      );
    });
  });

describe("encodeToXmlString", function () {
|
||||
it("should get a correctly encoded string with some entities", function () {
|
||||
const str = "\"\u0397ell😂' & <W😂rld>";
|
||||
expect(encodeToXmlString(str)).toEqual(
|
||||
""Ηell😂' & <W😂rld>"
|
||||
);
|
||||
});
|
||||
|
||||
it("should get a correctly encoded basic ascii string", function () {
|
||||
const str = "hello world";
|
||||
expect(encodeToXmlString(str)).toEqual(str);
|
||||
});
|
||||
});
|
||||
|
||||
describe("validateCSSFont", function () {
|
||||
it("Check font family", function () {
|
||||
const cssFontInfo = {
|
||||
fontFamily: `"blah blah " blah blah"`,
|
||||
fontWeight: 0,
|
||||
italicAngle: 0,
|
||||
};
|
||||
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(false);
|
||||
|
||||
cssFontInfo.fontFamily = `"blah blah \\" blah blah"`;
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(true);
|
||||
|
||||
cssFontInfo.fontFamily = `'blah blah ' blah blah'`;
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(false);
|
||||
|
||||
cssFontInfo.fontFamily = `'blah blah \\' blah blah'`;
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(true);
|
||||
|
||||
cssFontInfo.fontFamily = `"blah blah `;
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(false);
|
||||
|
||||
cssFontInfo.fontFamily = `blah blah"`;
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(false);
|
||||
|
||||
cssFontInfo.fontFamily = `'blah blah `;
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(false);
|
||||
|
||||
cssFontInfo.fontFamily = `blah blah'`;
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(false);
|
||||
|
||||
cssFontInfo.fontFamily = "blah blah blah";
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(true);
|
||||
|
||||
cssFontInfo.fontFamily = "blah 0blah blah";
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(false);
|
||||
|
||||
cssFontInfo.fontFamily = "blah blah -0blah";
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(false);
|
||||
|
||||
cssFontInfo.fontFamily = "blah blah --blah";
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(false);
|
||||
|
||||
cssFontInfo.fontFamily = "blah blah -blah";
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(true);
|
||||
|
||||
cssFontInfo.fontFamily = "blah fdqAJqjHJK23kl23__--Kj blah";
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(true);
|
||||
|
||||
cssFontInfo.fontFamily = "blah fdqAJqjH$JK23kl23__--Kj blah";
|
||||
expect(validateCSSFont(cssFontInfo)).toEqual(false);
|
||||
});
|
||||
|
||||
it("Check font weight", function () {
|
||||
const cssFontInfo = {
|
||||
fontFamily: "blah",
|
||||
fontWeight: 100,
|
||||
italicAngle: 0,
|
||||
};
|
||||
|
||||
validateCSSFont(cssFontInfo);
|
||||
expect(cssFontInfo.fontWeight).toEqual("100");
|
||||
|
||||
cssFontInfo.fontWeight = "700";
|
||||
validateCSSFont(cssFontInfo);
|
||||
expect(cssFontInfo.fontWeight).toEqual("700");
|
||||
|
||||
cssFontInfo.fontWeight = "normal";
|
||||
validateCSSFont(cssFontInfo);
|
||||
expect(cssFontInfo.fontWeight).toEqual("normal");
|
||||
|
||||
cssFontInfo.fontWeight = 314;
|
||||
validateCSSFont(cssFontInfo);
|
||||
expect(cssFontInfo.fontWeight).toEqual("400");
|
||||
});
|
||||
|
||||
it("Check italic angle", function () {
|
||||
const cssFontInfo = {
|
||||
fontFamily: "blah",
|
||||
fontWeight: 100,
|
||||
italicAngle: 10,
|
||||
};
|
||||
validateCSSFont(cssFontInfo);
|
||||
expect(cssFontInfo.italicAngle).toEqual("10");
|
||||
|
||||
cssFontInfo.italicAngle = -123;
|
||||
validateCSSFont(cssFontInfo);
|
||||
expect(cssFontInfo.italicAngle).toEqual("14");
|
||||
|
||||
cssFontInfo.italicAngle = "91";
|
||||
validateCSSFont(cssFontInfo);
|
||||
expect(cssFontInfo.italicAngle).toEqual("14");
|
||||
|
||||
cssFontInfo.italicAngle = 2.718;
|
||||
validateCSSFont(cssFontInfo);
|
||||
expect(cssFontInfo.italicAngle).toEqual("2.718");
|
||||
});
|
||||
});
|
||||
|
||||
describe("isAscii", function () {
|
||||
it("handles ascii/non-ascii strings", function () {
|
||||
expect(isAscii("hello world")).toEqual(true);
|
||||
expect(isAscii("こんにちは世界の")).toEqual(false);
|
||||
expect(isAscii("hello world in Japanese is こんにちは世界の")).toEqual(
|
||||
false
|
||||
);
|
||||
expect(isAscii("")).toEqual(true);
|
||||
expect(isAscii(123)).toEqual(false);
|
||||
expect(isAscii(null)).toEqual(false);
|
||||
expect(isAscii(undefined)).toEqual(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("stringToUTF16HexString", function () {
|
||||
it("should encode a string in UTF16 hexadecimal format", function () {
|
||||
expect(stringToUTF16HexString("hello world")).toEqual(
|
||||
"00680065006c006c006f00200077006f0072006c0064"
|
||||
);
|
||||
|
||||
expect(stringToUTF16HexString("こんにちは世界の")).toEqual(
|
||||
"30533093306b3061306f4e16754c306e"
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("stringToUTF16String", function () {
|
||||
it("should encode a string in UTF16", function () {
|
||||
expect(stringToUTF16String("hello world")).toEqual(
|
||||
"\0h\0e\0l\0l\0o\0 \0w\0o\0r\0l\0d"
|
||||
);
|
||||
|
||||
expect(stringToUTF16String("こんにちは世界の")).toEqual(
|
||||
"\x30\x53\x30\x93\x30\x6b\x30\x61\x30\x6f\x4e\x16\x75\x4c\x30\x6e"
|
||||
);
|
||||
});
|
||||
|
||||
it("should encode a string in UTF16BE with a BOM", function () {
|
||||
expect(
|
||||
stringToUTF16String("hello world", /* bigEndian = */ true)
|
||||
).toEqual("\xfe\xff\0h\0e\0l\0l\0o\0 \0w\0o\0r\0l\0d");
|
||||
|
||||
expect(
|
||||
stringToUTF16String("こんにちは世界の", /* bigEndian = */ true)
|
||||
).toEqual(
|
||||
"\xfe\xff\x30\x53\x30\x93\x30\x6b\x30\x61\x30\x6f\x4e\x16\x75\x4c\x30\x6e"
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getSizeInBytes", function () {
|
||||
it("should get the size in bytes to use to represent a positive integer", function () {
|
||||
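      // Presumably the expected size is Math.ceil(Math.log2(n + 1) / 8) bytes
      // for a positive integer n, which the loops below spot-check.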
      expect(getSizeInBytes(0)).toEqual(0);
      for (let i = 1; i <= 0xff; i++) {
        expect(getSizeInBytes(i)).toEqual(1);
      }

      for (let i = 0x100; i <= 0xffff; i += 0x100) {
        expect(getSizeInBytes(i)).toEqual(2);
      }

      for (let i = 0x10000; i <= 0xffffff; i += 0x10000) {
        expect(getSizeInBytes(i)).toEqual(3);
      }
    });
  });
});
932
test/unit/crypto_spec.js
Normal file
@@ -0,0 +1,932 @@
/* Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {
  AES128Cipher,
  AES256Cipher,
  ARCFourCipher,
  CipherTransformFactory,
  PDF17,
  PDF20,
} from "../../src/core/crypto.js";
import {
  calculateSHA384,
  calculateSHA512,
} from "../../src/core/calculate_sha_other.js";
import { Dict, Name } from "../../src/core/primitives.js";
import {
  PasswordException,
  PasswordResponses,
  stringToBytes,
} from "../../src/shared/util.js";
import { calculateMD5 } from "../../src/core/calculate_md5.js";
import { calculateSHA256 } from "../../src/core/calculate_sha256.js";

describe("crypto", function () {
  function hex2binary(s) {
    const digits = "0123456789ABCDEF";
    s = s.toUpperCase();
    const n = s.length >> 1;
    const result = new Uint8Array(n);
    for (let i = 0, j = 0; i < n; ++i) {
      const d1 = s.charAt(j++);
      const d2 = s.charAt(j++);
      const value = (digits.indexOf(d1) << 4) | digits.indexOf(d2);
      result[i] = value;
    }
    return result;
  }
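  // For example, hex2binary("0123") yields Uint8Array [0x01, 0x23]; it is
  // used below to compare results against the published test vectors.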

  // RFC 1321, A.5 Test suite
  describe("calculateMD5", function () {
    it("should pass RFC 1321 test #1", function () {
      const input = stringToBytes("");
      const result = calculateMD5(input, 0, input.length);
      const expected = hex2binary("d41d8cd98f00b204e9800998ecf8427e");
      expect(result).toEqual(expected);
    });
    it("should pass RFC 1321 test #2", function () {
      const input = stringToBytes("a");
      const result = calculateMD5(input, 0, input.length);
      const expected = hex2binary("0cc175b9c0f1b6a831c399e269772661");
      expect(result).toEqual(expected);
    });
    it("should pass RFC 1321 test #3", function () {
      const input = stringToBytes("abc");
      const result = calculateMD5(input, 0, input.length);
      const expected = hex2binary("900150983cd24fb0d6963f7d28e17f72");
      expect(result).toEqual(expected);
    });
    it("should pass RFC 1321 test #4", function () {
      const input = stringToBytes("message digest");
      const result = calculateMD5(input, 0, input.length);
      const expected = hex2binary("f96b697d7cb7938d525a2f31aaf161d0");
      expect(result).toEqual(expected);
    });
    it("should pass RFC 1321 test #5", function () {
      const input = stringToBytes("abcdefghijklmnopqrstuvwxyz");
      const result = calculateMD5(input, 0, input.length);
      const expected = hex2binary("c3fcd3d76192e4007dfb496cca67e13b");
      expect(result).toEqual(expected);
    });
    it("should pass RFC 1321 test #6", function () {
      const input = stringToBytes(
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
      );
      const result = calculateMD5(input, 0, input.length);
      const expected = hex2binary("d174ab98d277d9f5a5611c2c9f419d9f");
      expect(result).toEqual(expected);
    });
    it("should pass RFC 1321 test #7", function () {
      const input = stringToBytes(
        "123456789012345678901234567890123456789012345678" +
          "90123456789012345678901234567890"
      );
      const result = calculateMD5(input, 0, input.length);
      const expected = hex2binary("57edf4a22be3c955ac49da2e2107b67a");
      expect(result).toEqual(expected);
    });
  });

  // http://www.freemedialibrary.com/index.php/RC4_test_vectors are used
  describe("ARCFourCipher", function () {
    it("should pass test #1", function () {
      const key = hex2binary("0123456789abcdef");
      const input = hex2binary("0123456789abcdef");
      const cipher = new ARCFourCipher(key);
      const result = cipher.encryptBlock(input);
      const expected = hex2binary("75b7878099e0c596");
      expect(result).toEqual(expected);
    });
    it("should pass test #2", function () {
      const key = hex2binary("0123456789abcdef");
      const input = hex2binary("0000000000000000");
      const cipher = new ARCFourCipher(key);
      const result = cipher.encryptBlock(input);
      const expected = hex2binary("7494c2e7104b0879");
      expect(result).toEqual(expected);
    });
    it("should pass test #3", function () {
      const key = hex2binary("0000000000000000");
      const input = hex2binary("0000000000000000");
      const cipher = new ARCFourCipher(key);
      const result = cipher.encryptBlock(input);
      const expected = hex2binary("de188941a3375d3a");
      expect(result).toEqual(expected);
    });
    it("should pass test #4", function () {
      const key = hex2binary("ef012345");
      const input = hex2binary("00000000000000000000");
      const cipher = new ARCFourCipher(key);
      const result = cipher.encryptBlock(input);
      const expected = hex2binary("d6a141a7ec3c38dfbd61");
      expect(result).toEqual(expected);
    });
    it("should pass test #5", function () {
      const key = hex2binary("0123456789abcdef");
      const input = hex2binary(
        "010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "10101010101010101010101010101010101010101010101010101010101010101010" +
          "101010101010101010101"
      );
      const cipher = new ARCFourCipher(key);
      const result = cipher.encryptBlock(input);
      const expected = hex2binary(
        "7595c3e6114a09780c4ad452338e1ffd9a1be9498f813d76" +
          "533449b6778dcad8c78a8d2ba9ac66085d0e53d59c26c2d1c490c1ebbe0ce66d1b6b" +
          "1b13b6b919b847c25a91447a95e75e4ef16779cde8bf0a95850e32af9689444fd377" +
          "108f98fdcbd4e726567500990bcc7e0ca3c4aaa304a387d20f3b8fbbcd42a1bd311d" +
          "7a4303dda5ab078896ae80c18b0af66dff319616eb784e495ad2ce90d7f772a81747" +
          "b65f62093b1e0db9e5ba532fafec47508323e671327df9444432cb7367cec82f5d44" +
          "c0d00b67d650a075cd4b70dedd77eb9b10231b6b5b741347396d62897421d43df9b4" +
          "2e446e358e9c11a9b2184ecbef0cd8e7a877ef968f1390ec9b3d35a5585cb009290e" +
          "2fcde7b5ec66d9084be44055a619d9dd7fc3166f9487f7cb272912426445998514c1" +
          "5d53a18c864ce3a2b7555793988126520eacf2e3066e230c91bee4dd5304f5fd0405" +
          "b35bd99c73135d3d9bc335ee049ef69b3867bf2d7bd1eaa595d8bfc0066ff8d31509" +
          "eb0c6caa006c807a623ef84c3d33c195d23ee320c40de0558157c822d4b8c569d849" +
          "aed59d4e0fd7f379586b4b7ff684ed6a189f7486d49b9c4bad9ba24b96abf924372c" +
          "8a8fffb10d55354900a77a3db5f205e1b99fcd8660863a159ad4abe40fa48934163d" +
          "dde542a6585540fd683cbfd8c00f12129a284deacc4cdefe58be7137541c047126c8" +
          "d49e2755ab181ab7e940b0c0"
      );
      expect(result).toEqual(expected);
    });
    it("should pass test #6", function () {
      const key = hex2binary("fb029e3031323334");
      const input = hex2binary(
        "aaaa0300000008004500004e661a00008011be640a0001220af" +
          "fffff00890089003a000080a601100001000000000000204543454a4548454346434" +
          "550464545494546464343414341434143414341414100002000011bd0b604"
      );
      const cipher = new ARCFourCipher(key);
      const result = cipher.encryptBlock(input);
      const expected = hex2binary(
        "f69c5806bd6ce84626bcbefb9474650aad1f7909b0f64d5f" +
          "58a503a258b7ed22eb0ea64930d3a056a55742fcce141d485f8aa836dea18df42c53" +
          "80805ad0c61a5d6f58f41040b24b7d1a693856ed0d4398e7aee3bf0e2a2ca8f7"
      );
      expect(result).toEqual(expected);
    });
    it("should pass test #7", function () {
      const key = hex2binary("0123456789abcdef");
      const input = hex2binary(
        "123456789abcdef0123456789abcdef0123456789abcdef012345678"
      );
      const cipher = new ARCFourCipher(key);
      const result = cipher.encryptBlock(input);
      const expected = hex2binary(
        "66a0949f8af7d6891f7f832ba833c00c892ebe30143ce28740011ecf"
      );
      expect(result).toEqual(expected);
    });
  });

  describe("calculateSHA256", function () {
    it("should properly hash abc", function () {
      const input = stringToBytes("abc");
      const result = calculateSHA256(input, 0, input.length);
      const expected = hex2binary(
        "BA7816BF8F01CFEA414140DE5DAE2223B00361A396177A9CB410FF61F20015AD"
      );
      expect(result).toEqual(expected);
    });
    it("should properly hash a multiblock input", function () {
      const input = stringToBytes(
        "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"
|
||||
);
|
||||
const result = calculateSHA256(input, 0, input.length);
|
||||
const expected = hex2binary(
|
||||
"248D6A61D20638B8E5C026930C3E6039A33CE45964FF2167F6ECEDD419DB06C1"
|
||||
);
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe("calculateSHA384", function () {
|
||||
it("should properly hash abc", function () {
|
||||
const input = stringToBytes("abc");
|
||||
const result = calculateSHA384(input, 0, input.length);
|
||||
const expected = hex2binary(
|
||||
"CB00753F45A35E8BB5A03D699AC65007272C32AB0EDED163" +
|
||||
"1A8B605A43FF5BED8086072BA1E7CC2358BAECA134C825A7"
|
||||
);
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
it("should properly hash a multiblock input", function () {
|
||||
const input = stringToBytes(
|
||||
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklm" +
|
||||
"ghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrs" +
|
||||
"mnopqrstnopqrstu"
|
||||
);
|
||||
const result = calculateSHA384(input, 0, input.length);
|
||||
const expected = hex2binary(
|
||||
"09330C33F71147E83D192FC782CD1B4753111B173B3B05D2" +
|
||||
"2FA08086E3B0F712FCC7C71A557E2DB966C3E9FA91746039"
|
||||
);
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe("calculateSHA512", function () {
|
||||
it("should properly hash abc", function () {
|
||||
const input = stringToBytes("abc");
|
||||
const result = calculateSHA512(input, 0, input.length);
|
||||
const expected = hex2binary(
|
||||
"DDAF35A193617ABACC417349AE20413112E6FA4E89A97EA2" +
|
||||
"0A9EEEE64B55D39A2192992A274FC1A836BA3C23A3FEEBBD" +
|
||||
"454D4423643CE80E2A9AC94FA54CA49F"
|
||||
);
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
it("should properly hash a multiblock input", function () {
|
||||
const input = stringToBytes(
|
||||
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklm" +
|
||||
"ghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrs" +
|
||||
"mnopqrstnopqrstu"
|
||||
);
|
||||
const result = calculateSHA512(input, 0, input.length);
|
||||
const expected = hex2binary(
|
||||
"8E959B75DAE313DA8CF4F72814FC143F8F7779C6EB9F7FA1" +
|
||||
"7299AEADB6889018501D289E4900F7E4331B99DEC4B5433A" +
|
||||
"C7D329EEB6DD26545E96E55B874BE909"
|
||||
);
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe("AES128", function () {
|
||||
describe("Encryption", function () {
|
||||
it("should be able to encrypt a block", function () {
|
||||
const input = hex2binary("00112233445566778899aabbccddeeff");
|
||||
const key = hex2binary("000102030405060708090a0b0c0d0e0f");
|
||||
const iv = hex2binary("00000000000000000000000000000000");
|
||||
const cipher = new AES128Cipher(key);
|
||||
const result = cipher.encrypt(input, iv);
|
||||
const expected = hex2binary("69c4e0d86a7b0430d8cdb78070b4c55a");
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Decryption", function () {
|
||||
it("should be able to decrypt a block with IV in stream", function () {
|
||||
const input = hex2binary(
|
||||
"0000000000000000000000000000000069c4e0d86a7b0430d" +
|
||||
"8cdb78070b4c55a"
|
||||
);
|
||||
const key = hex2binary("000102030405060708090a0b0c0d0e0f");
|
||||
const cipher = new AES128Cipher(key);
|
||||
const result = cipher.decryptBlock(input);
|
||||
const expected = hex2binary("00112233445566778899aabbccddeeff");
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("AES256", function () {
|
||||
describe("Encryption", function () {
|
||||
it("should be able to encrypt a block", function () {
|
||||
const input = hex2binary("00112233445566778899aabbccddeeff");
|
||||
const key = hex2binary(
|
||||
"000102030405060708090a0b0c0d0e0f101112131415161718" +
|
||||
"191a1b1c1d1e1f"
|
||||
);
|
||||
const iv = hex2binary("00000000000000000000000000000000");
|
||||
const cipher = new AES256Cipher(key);
|
||||
const result = cipher.encrypt(input, iv);
|
||||
const expected = hex2binary("8ea2b7ca516745bfeafc49904b496089");
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Decryption", function () {
|
||||
it("should be able to decrypt a block with specified iv", function () {
|
||||
const input = hex2binary("8ea2b7ca516745bfeafc49904b496089");
|
||||
const key = hex2binary(
|
||||
"000102030405060708090a0b0c0d0e0f101112131415161718" +
|
||||
"191a1b1c1d1e1f"
|
||||
);
|
||||
const iv = hex2binary("00000000000000000000000000000000");
|
||||
const cipher = new AES256Cipher(key);
|
||||
const result = cipher.decryptBlock(input, false, iv);
|
||||
const expected = hex2binary("00112233445566778899aabbccddeeff");
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
it("should be able to decrypt a block with IV in stream", function () {
|
||||
const input = hex2binary(
|
||||
"000000000000000000000000000000008ea2b7ca516745bf" +
|
||||
"eafc49904b496089"
|
||||
);
|
||||
const key = hex2binary(
|
||||
"000102030405060708090a0b0c0d0e0f101112131415161718" +
|
||||
"191a1b1c1d1e1f"
|
||||
);
|
||||
const cipher = new AES256Cipher(key);
|
||||
const result = cipher.decryptBlock(input, false);
|
||||
const expected = hex2binary("00112233445566778899aabbccddeeff");
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("PDF17Algorithm", function () {
|
||||
it("should correctly check a user key", function () {
|
||||
const alg = new PDF17();
|
||||
const password = new Uint8Array([117, 115, 101, 114]);
|
||||
const userValidation = new Uint8Array([
|
||||
117, 169, 4, 32, 159, 101, 22, 220,
|
||||
]);
|
||||
const userPassword = new Uint8Array([
|
||||
131, 242, 143, 160, 87, 2, 138, 134, 79, 253, 189, 173, 224, 73, 144,
|
||||
241, 190, 81, 197, 15, 249, 105, 145, 151, 15, 194, 65, 3, 1, 126, 187,
|
||||
221,
|
||||
]);
|
||||
const result = alg.checkUserPassword(
|
||||
password,
|
||||
userValidation,
|
||||
userPassword
|
||||
);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
it("should correctly check an owner key", function () {
|
||||
const alg = new PDF17();
|
||||
const password = new Uint8Array([111, 119, 110, 101, 114]);
|
||||
const ownerValidation = new Uint8Array([
|
||||
243, 118, 71, 153, 128, 17, 101, 62,
|
||||
]);
|
||||
const ownerPassword = new Uint8Array([
|
||||
60, 98, 137, 35, 51, 101, 200, 152, 210, 178, 226, 228, 134, 205, 163,
|
||||
24, 204, 126, 177, 36, 106, 50, 36, 125, 210, 172, 171, 120, 222, 108,
|
||||
139, 115,
|
||||
]);
|
||||
const uBytes = new Uint8Array([
|
||||
131, 242, 143, 160, 87, 2, 138, 134, 79, 253, 189, 173, 224, 73, 144,
|
||||
241, 190, 81, 197, 15, 249, 105, 145, 151, 15, 194, 65, 3, 1, 126, 187,
|
||||
221, 117, 169, 4, 32, 159, 101, 22, 220, 168, 94, 215, 192, 100, 38,
|
||||
188, 40,
|
||||
]);
|
||||
const result = alg.checkOwnerPassword(
|
||||
password,
|
||||
ownerValidation,
|
||||
uBytes,
|
||||
ownerPassword
|
||||
);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
it("should generate a file encryption key from the user key", function () {
|
||||
const alg = new PDF17();
|
||||
const password = new Uint8Array([117, 115, 101, 114]);
|
||||
const userKeySalt = new Uint8Array([168, 94, 215, 192, 100, 38, 188, 40]);
|
||||
const userEncryption = new Uint8Array([
|
||||
35, 150, 195, 169, 245, 51, 51, 255, 158, 158, 33, 242, 231, 75, 125,
|
||||
190, 25, 126, 172, 114, 195, 244, 137, 245, 234, 165, 42, 74, 60, 38,
|
||||
17, 17,
|
||||
]);
|
||||
const result = alg.getUserKey(password, userKeySalt, userEncryption);
|
||||
const expected = new Uint8Array([
|
||||
63, 114, 136, 209, 87, 61, 12, 30, 249, 1, 186, 144, 254, 248, 163, 153,
|
||||
151, 51, 133, 10, 80, 152, 206, 15, 72, 187, 231, 33, 224, 239, 13, 213,
|
||||
]);
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
|
||||
it("should generate a file encryption key from the owner key", function () {
|
||||
const alg = new PDF17();
|
||||
const password = new Uint8Array([111, 119, 110, 101, 114]);
|
||||
const ownerKeySalt = new Uint8Array([
|
||||
200, 245, 242, 12, 218, 123, 24, 120,
|
||||
]);
|
||||
const ownerEncryption = new Uint8Array([
|
||||
213, 202, 14, 189, 110, 76, 70, 191, 6, 195, 10, 190, 157, 100, 144, 85,
|
||||
8, 62, 123, 178, 156, 229, 50, 40, 229, 216, 54, 222, 34, 38, 106, 223,
|
||||
]);
|
||||
const uBytes = new Uint8Array([
|
||||
131, 242, 143, 160, 87, 2, 138, 134, 79, 253, 189, 173, 224, 73, 144,
|
||||
241, 190, 81, 197, 15, 249, 105, 145, 151, 15, 194, 65, 3, 1, 126, 187,
|
||||
221, 117, 169, 4, 32, 159, 101, 22, 220, 168, 94, 215, 192, 100, 38,
|
||||
188, 40,
|
||||
]);
|
||||
const result = alg.getOwnerKey(
|
||||
password,
|
||||
ownerKeySalt,
|
||||
uBytes,
|
||||
ownerEncryption
|
||||
);
|
||||
const expected = new Uint8Array([
|
||||
63, 114, 136, 209, 87, 61, 12, 30, 249, 1, 186, 144, 254, 248, 163, 153,
|
||||
151, 51, 133, 10, 80, 152, 206, 15, 72, 187, 231, 33, 224, 239, 13, 213,
|
||||
]);
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe("PDF20Algorithm", function () {
|
||||
it("should correctly check a user key", function () {
|
||||
const alg = new PDF20();
|
||||
const password = new Uint8Array([117, 115, 101, 114]);
|
||||
const userValidation = new Uint8Array([
|
||||
83, 245, 146, 101, 198, 247, 34, 198,
|
||||
]);
|
||||
const userPassword = new Uint8Array([
|
||||
94, 230, 205, 75, 166, 99, 250, 76, 219, 128, 17, 85, 57, 17, 33, 164,
|
||||
150, 46, 103, 176, 160, 156, 187, 233, 166, 223, 163, 253, 147, 235, 95,
|
||||
184,
|
||||
]);
|
||||
const result = alg.checkUserPassword(
|
||||
password,
|
||||
userValidation,
|
||||
userPassword
|
||||
);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
it("should correctly check an owner key", function () {
|
||||
const alg = new PDF20();
|
||||
const password = new Uint8Array([111, 119, 110, 101, 114]);
|
||||
const ownerValidation = new Uint8Array([
|
||||
142, 232, 169, 208, 202, 214, 5, 185,
|
||||
]);
|
||||
const ownerPassword = new Uint8Array([
|
||||
88, 232, 62, 54, 245, 26, 245, 209, 137, 123, 221, 72, 199, 49, 37, 217,
|
||||
31, 74, 115, 167, 127, 158, 176, 77, 45, 163, 87, 47, 39, 90, 217, 141,
|
||||
]);
|
||||
const uBytes = new Uint8Array([
|
||||
94, 230, 205, 75, 166, 99, 250, 76, 219, 128, 17, 85, 57, 17, 33, 164,
|
||||
150, 46, 103, 176, 160, 156, 187, 233, 166, 223, 163, 253, 147, 235, 95,
|
||||
184, 83, 245, 146, 101, 198, 247, 34, 198, 191, 11, 16, 94, 237, 216,
|
||||
20, 175,
|
||||
]);
|
||||
const result = alg.checkOwnerPassword(
|
||||
password,
|
||||
ownerValidation,
|
||||
uBytes,
|
||||
ownerPassword
|
||||
);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
it("should generate a file encryption key from the user key", function () {
|
||||
const alg = new PDF20();
|
||||
const password = new Uint8Array([117, 115, 101, 114]);
|
||||
const userKeySalt = new Uint8Array([191, 11, 16, 94, 237, 216, 20, 175]);
|
||||
const userEncryption = new Uint8Array([
|
||||
121, 208, 2, 181, 230, 89, 156, 60, 253, 143, 212, 28, 84, 180, 196,
|
||||
177, 173, 128, 221, 107, 46, 20, 94, 186, 135, 51, 95, 24, 20, 223, 254,
|
||||
36,
|
||||
]);
|
||||
const result = alg.getUserKey(password, userKeySalt, userEncryption);
|
||||
const expected = new Uint8Array([
|
||||
42, 218, 213, 39, 73, 91, 72, 79, 67, 38, 248, 133, 18, 189, 61, 34,
|
||||
107, 79, 29, 56, 59, 181, 213, 118, 113, 34, 65, 210, 87, 174, 22, 239,
|
||||
]);
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
|
||||
it("should generate a file encryption key from the owner key", function () {
|
||||
const alg = new PDF20();
|
||||
const password = new Uint8Array([111, 119, 110, 101, 114]);
|
||||
const ownerKeySalt = new Uint8Array([29, 208, 185, 46, 11, 76, 135, 149]);
|
||||
const ownerEncryption = new Uint8Array([
|
||||
209, 73, 224, 77, 103, 155, 201, 181, 190, 68, 223, 20, 62, 90, 56, 210,
|
||||
5, 240, 178, 128, 238, 124, 68, 254, 253, 244, 62, 108, 208, 135, 10,
|
||||
251,
|
||||
]);
|
||||
const uBytes = new Uint8Array([
|
||||
94, 230, 205, 75, 166, 99, 250, 76, 219, 128, 17, 85, 57, 17, 33, 164,
|
||||
150, 46, 103, 176, 160, 156, 187, 233, 166, 223, 163, 253, 147, 235, 95,
|
||||
184, 83, 245, 146, 101, 198, 247, 34, 198, 191, 11, 16, 94, 237, 216,
|
||||
20, 175,
|
||||
]);
|
||||
const result = alg.getOwnerKey(
|
||||
password,
|
||||
ownerKeySalt,
|
||||
uBytes,
|
||||
ownerEncryption
|
||||
);
|
||||
const expected = new Uint8Array([
|
||||
42, 218, 213, 39, 73, 91, 72, 79, 67, 38, 248, 133, 18, 189, 61, 34,
|
||||
107, 79, 29, 56, 59, 181, 213, 118, 113, 34, 65, 210, 87, 174, 22, 239,
|
||||
]);
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("CipherTransformFactory", function () {
|
||||
function buildDict(map) {
|
||||
const dict = new Dict();
|
||||
for (const key in map) {
|
||||
dict.set(key, map[key]);
|
||||
}
|
||||
return dict;
|
||||
}
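// A minimal usage sketch (illustrative only, not part of the test-suite):
// buildDict({ Filter: Name.get("Standard"), V: 2 }) returns a Dict with its
// "Filter" and "V" entries set to the given values.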
|
||||
|
||||
function ensurePasswordCorrect(dict, fileId, password) {
|
||||
try {
|
||||
const factory = new CipherTransformFactory(dict, fileId, password);
|
||||
expect("createCipherTransform" in factory).toEqual(true);
|
||||
} catch {
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
}
|
||||
}
|
||||
|
||||
function ensurePasswordNeeded(dict, fileId, password) {
|
||||
try {
|
||||
// eslint-disable-next-line no-new
|
||||
new CipherTransformFactory(dict, fileId, password);
|
||||
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
} catch (ex) {
|
||||
expect(ex instanceof PasswordException).toEqual(true);
|
||||
expect(ex.code).toEqual(PasswordResponses.NEED_PASSWORD);
|
||||
}
|
||||
}
|
||||
|
||||
function ensurePasswordIncorrect(dict, fileId, password) {
|
||||
try {
|
||||
// eslint-disable-next-line no-new
|
||||
new CipherTransformFactory(dict, fileId, password);
|
||||
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
} catch (ex) {
|
||||
expect(ex instanceof PasswordException).toEqual(true);
|
||||
expect(ex.code).toEqual(PasswordResponses.INCORRECT_PASSWORD);
|
||||
}
|
||||
}
|
||||
|
||||
function ensureAESEncryptedStringHasCorrectLength(
|
||||
dict,
|
||||
fileId,
|
||||
password,
|
||||
string
|
||||
) {
|
||||
const factory = new CipherTransformFactory(dict, fileId, password);
|
||||
const cipher = factory.createCipherTransform(123, 0);
|
||||
const encrypted = cipher.encryptString(string);
|
||||
|
||||
// The final length is a multiple of 16.
// If the initial string has a length which is a multiple of 16,
// then 16 chars of padding are added.
// So we have the mapping:
//  - length: [0-15]  => new length: 16
//  - length: [16-31] => new length: 32
//  - length: [32-47] => new length: 48
//  ...
|
||||
expect(encrypted.length).toEqual(
|
||||
16 /* initialization vector length */ +
|
||||
16 * Math.ceil((string.length + 1) / 16)
|
||||
);
|
||||
}
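// A worked example of the mapping above (a sketch, not executed by the tests):
// since a 16-byte initialization vector is prepended, plaintext lengths of
// 0, 15, 16 and 31 are expected to produce 32, 32, 48 and 48 bytes
// respectively, i.e. 16 + 16 * Math.ceil((string.length + 1) / 16).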
|
||||
|
||||
function ensureEncryptDecryptIsIdentity(dict, fileId, password, string) {
|
||||
const factory = new CipherTransformFactory(dict, fileId, password);
|
||||
const cipher = factory.createCipherTransform(123, 0);
|
||||
const encrypted = cipher.encryptString(string);
|
||||
const decrypted = cipher.decryptString(encrypted);
|
||||
|
||||
expect(string).toEqual(decrypted);
|
||||
}
|
||||
|
||||
let fileId1, fileId2, dict1, dict2, dict3;
|
||||
let aes256Dict, aes256IsoDict, aes256BlankDict, aes256IsoBlankDict;
|
||||
|
||||
beforeAll(function () {
|
||||
fileId1 = unescape("%F6%C6%AF%17%F3rR%8DRM%9A%80%D1%EF%DF%18");
|
||||
fileId2 = unescape("%3CL_%3AD%96%AF@%9A%9D%B3%3Cx%1Cv%AC");
|
||||
|
||||
dict1 = buildDict({
|
||||
Filter: Name.get("Standard"),
|
||||
V: 2,
|
||||
Length: 128,
|
||||
O: unescape(
|
||||
"%80%C3%04%96%91o%20sl%3A%E6%1B%13T%91%F2%0DV%12%E3%FF%5E%B" +
|
||||
"B%E9VO%D8k%9A%CA%7C%5D"
|
||||
),
|
||||
U: unescape(
|
||||
"j%0C%8D%3EY%19%00%BCjd%7D%91%BD%AA%00%18%00%00%00%00%00%00" +
|
||||
"%00%00%00%00%00%00%00%00%00%00"
|
||||
),
|
||||
P: -1028,
|
||||
R: 3,
|
||||
});
|
||||
dict2 = buildDict({
|
||||
Filter: Name.get("Standard"),
|
||||
V: 4,
|
||||
Length: 128,
|
||||
O: unescape(
|
||||
"sF%14v.y5%27%DB%97%0A5%22%B3%E1%D4%AD%BD%9B%3C%B4%A5%89u%1" +
|
||||
"5%B2Y%F1h%D9%E9%F4"
|
||||
),
|
||||
U: unescape(
|
||||
"%93%04%89%A9%BF%8AE%A6%88%A2%DB%C2%A0%A8gn%00%00%00%00%00%" +
|
||||
"00%00%00%00%00%00%00%00%00%00%00"
|
||||
),
|
||||
P: -1084,
|
||||
R: 4,
|
||||
});
|
||||
dict3 = {
|
||||
Filter: Name.get("Standard"),
|
||||
V: 5,
|
||||
Length: 256,
|
||||
O: unescape(
|
||||
"%3Cb%89%233e%C8%98%D2%B2%E2%E4%86%CD%A3%18%CC%7E%B1%24j2%2" +
|
||||
"4%7D%D2%AC%ABx%DEl%8Bs%F3vG%99%80%11e%3E%C8%F5%F2%0C%DA%7B" +
|
||||
"%18x"
|
||||
),
|
||||
U: unescape(
|
||||
"%83%F2%8F%A0W%02%8A%86O%FD%BD%AD%E0I%90%F1%BEQ%C5%0F%F9i%9" +
|
||||
"1%97%0F%C2A%03%01%7E%BB%DDu%A9%04%20%9Fe%16%DC%A8%5E%D7%C0" +
|
||||
"d%26%BC%28"
|
||||
),
|
||||
OE: unescape(
|
||||
"%D5%CA%0E%BDnLF%BF%06%C3%0A%BE%9Dd%90U%08%3E%7B%B2%9C%E52" +
|
||||
"%28%E5%D86%DE%22%26j%DF"
|
||||
),
|
||||
UE: unescape(
|
||||
"%23%96%C3%A9%F533%FF%9E%9E%21%F2%E7K%7D%BE%19%7E%ACr%C3%F" +
|
||||
"4%89%F5%EA%A5*J%3C%26%11%11"
|
||||
),
|
||||
Perms: unescape("%D8%FC%844%E5e%0DB%5D%7Ff%FD%3COMM"),
|
||||
P: -1084,
|
||||
R: 5,
|
||||
};
|
||||
aes256Dict = buildDict(dict3);
|
||||
aes256IsoDict = buildDict({
|
||||
Filter: Name.get("Standard"),
|
||||
V: 5,
|
||||
Length: 256,
|
||||
O: unescape(
|
||||
"X%E8%3E6%F5%1A%F5%D1%89%7B%DDH%C71%25%D9%1FJs%A7%7F%9E%B0M" +
|
||||
"-%A3W/%27Z%D9%8D%8E%E8%A9%D0%CA%D6%05%B9%1D%D0%B9.%0BL%87%" +
|
||||
"95"
|
||||
),
|
||||
U: unescape(
|
||||
"%5E%E6%CDK%A6c%FAL%DB%80%11U9%11%21%A4%96.g%B0%A0%9C%BB%E9" +
|
||||
"%A6%DF%A3%FD%93%EB_%B8S%F5%92e%C6%F7%22%C6%BF%0B%10%5E%ED%" +
|
||||
"D8%14%AF"
|
||||
),
|
||||
OE: unescape(
|
||||
"%D1I%E0Mg%9B%C9%B5%BED%DF%14%3EZ8%D2%05%F0%B2%80%EE%7CD%F" +
|
||||
"E%FD%F4%3El%D0%87%0A%FB"
|
||||
),
|
||||
UE: unescape(
|
||||
"y%D0%02%B5%E6Y%9C%3C%FD%8F%D4%1CT%B4%C4%B1%AD%80%DDk.%14%" +
|
||||
"5E%BA%873_%18%14%DF%FE%24"
|
||||
),
|
||||
Perms: unescape("l%AD%0F%A0%EBM%86WM%3E%CB%B5%E0X%C97"),
|
||||
P: -1084,
|
||||
R: 6,
|
||||
});
|
||||
aes256BlankDict = buildDict({
|
||||
Filter: Name.get("Standard"),
|
||||
V: 5,
|
||||
Length: 256,
|
||||
O: unescape(
|
||||
"%B8p%04%C3g%26%FCW%CCN%D4%16%A1%E8%950YZ%C9%9E%B1-%97%F3%F" +
|
||||
"E%03%13%19ffZn%8F%F5%EB%EC%CC5sV%10e%CEl%B5%E9G%C1"
|
||||
),
|
||||
U: unescape(
|
||||
"%83%D4zi%F1O0%961%12%CC%82%CB%CA%BF5y%FD%21%EB%E4%D1%B5%1D" +
|
||||
"%D6%FA%14%F3%BE%8Fqs%EF%88%DE%E2%E8%DC%F55%E4%B8%16%C8%14%" +
|
||||
"8De%1E"
|
||||
),
|
||||
OE: unescape(
|
||||
"%8F%19%E8%D4%27%D5%07%CA%C6%A1%11%A6a%5Bt%F4%DF%0F%84%29%" +
|
||||
"0F%E4%EFF7%5B%5B%11%A0%8F%17e"
|
||||
),
|
||||
UE: unescape(
|
||||
"%81%F5%5D%B0%28%81%E4%7F_%7C%8F%85b%A0%7E%10%D0%88lx%7B%7" +
|
||||
"EJ%5E%912%B6d%12%27%05%F6"
|
||||
),
|
||||
Perms: unescape("%86%1562%0D%AE%A2%FB%5D%3B%22%3Dq%12%B2H"),
|
||||
P: -1084,
|
||||
R: 5,
|
||||
});
|
||||
aes256IsoBlankDict = buildDict({
|
||||
Filter: Name.get("Standard"),
|
||||
V: 5,
|
||||
Length: 256,
|
||||
O: unescape(
|
||||
"%F7%DB%99U%A6M%ACk%AF%CF%D7AFw%E9%C1%91%CBDgI%23R%CF%0C%15" +
|
||||
"r%D74%0D%CE%E9%91@%E4%98QF%BF%88%7Ej%DE%AD%8F%F4@%C1"
|
||||
),
|
||||
U: unescape(
|
||||
"%1A%A9%DC%918%83%93k%29%5B%117%B16%DB%E8%8E%FE%28%E5%89%D4" +
|
||||
"%0E%AD%12%3B%7DN_6fez%8BG%18%05YOh%7DZH%A3Z%87%17*"
|
||||
),
|
||||
OE: unescape(
|
||||
"%A4a%88%20h%1B%7F%CD%D5%CAc%D8R%83%E5%D6%1C%D2%98%07%984%" +
|
||||
"BA%AF%1B%B4%7FQ%F8%1EU%7D"
|
||||
),
|
||||
UE: unescape(
|
||||
"%A0%0AZU%27%1D%27%2C%0B%FE%0E%A2L%F9b%5E%A1%B9%D6v7b%B26%" +
|
||||
"A9N%99%F1%A4Deq"
|
||||
),
|
||||
Perms: unescape("%03%F2i%07%0D%C3%F9%F2%28%80%B7%F5%DD%D1c%EB"),
|
||||
P: -1084,
|
||||
R: 6,
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(function () {
|
||||
fileId1 = fileId2 = dict1 = dict2 = dict3 = null;
|
||||
aes256Dict = aes256IsoDict = aes256BlankDict = aes256IsoBlankDict = null;
|
||||
});
|
||||
|
||||
describe("#ctor", function () {
|
||||
describe("AES256 Revision 5", function () {
|
||||
it("should accept user password", function () {
|
||||
ensurePasswordCorrect(aes256Dict, fileId1, "user");
|
||||
});
|
||||
it("should accept owner password", function () {
|
||||
ensurePasswordCorrect(aes256Dict, fileId1, "owner");
|
||||
});
|
||||
it("should not accept blank password", function () {
|
||||
ensurePasswordNeeded(aes256Dict, fileId1);
|
||||
});
|
||||
it("should not accept wrong password", function () {
|
||||
ensurePasswordIncorrect(aes256Dict, fileId1, "wrong");
|
||||
});
|
||||
it("should accept blank password", function () {
|
||||
ensurePasswordCorrect(aes256BlankDict, fileId1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("AES256 Revision 6", function () {
|
||||
it("should accept user password", function () {
|
||||
ensurePasswordCorrect(aes256IsoDict, fileId1, "user");
|
||||
});
|
||||
it("should accept owner password", function () {
|
||||
ensurePasswordCorrect(aes256IsoDict, fileId1, "owner");
|
||||
});
|
||||
it("should not accept blank password", function () {
|
||||
ensurePasswordNeeded(aes256IsoDict, fileId1);
|
||||
});
|
||||
it("should not accept wrong password", function () {
|
||||
ensurePasswordIncorrect(aes256IsoDict, fileId1, "wrong");
|
||||
});
|
||||
it("should accept blank password", function () {
|
||||
ensurePasswordCorrect(aes256IsoBlankDict, fileId1);
|
||||
});
|
||||
});
|
||||
|
||||
it("should accept user password", function () {
|
||||
ensurePasswordCorrect(dict1, fileId1, "123456");
|
||||
});
|
||||
it("should accept owner password", function () {
|
||||
ensurePasswordCorrect(dict1, fileId1, "654321");
|
||||
});
|
||||
it("should not accept blank password", function () {
|
||||
ensurePasswordNeeded(dict1, fileId1);
|
||||
});
|
||||
it("should not accept wrong password", function () {
|
||||
ensurePasswordIncorrect(dict1, fileId1, "wrong");
|
||||
});
|
||||
it("should accept blank password", function () {
|
||||
ensurePasswordCorrect(dict2, fileId2);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Encrypt and decrypt", function () {
|
||||
it("should encrypt and decrypt using ARCFour", function () {
|
||||
dict3.CF = buildDict({
|
||||
Identity: buildDict({
|
||||
CFM: Name.get("V2"),
|
||||
}),
|
||||
});
|
||||
const dict = buildDict(dict3);
|
||||
ensureEncryptDecryptIsIdentity(dict, fileId1, "user", "hello world");
|
||||
});
|
||||
it("should encrypt and decrypt using AES128", function () {
|
||||
dict3.CF = buildDict({
|
||||
Identity: buildDict({
|
||||
CFM: Name.get("AESV2"),
|
||||
}),
|
||||
});
|
||||
const dict = buildDict(dict3);
|
||||
// 0 char
|
||||
ensureEncryptDecryptIsIdentity(dict, fileId1, "user", "");
|
||||
// 1 char
|
||||
ensureEncryptDecryptIsIdentity(dict, fileId1, "user", "a");
|
||||
// 2 chars
|
||||
ensureEncryptDecryptIsIdentity(dict, fileId1, "user", "aa");
|
||||
// 16 chars
|
||||
ensureEncryptDecryptIsIdentity(dict, fileId1, "user", "aaaaaaaaaaaaaaaa");
|
||||
// 19 chars
|
||||
ensureEncryptDecryptIsIdentity(
|
||||
dict,
|
||||
fileId1,
|
||||
"user",
|
||||
"aaaaaaaaaaaaaaaaaaa"
|
||||
);
|
||||
});
|
||||
it("should encrypt and decrypt using AES256", function () {
|
||||
dict3.CF = buildDict({
|
||||
Identity: buildDict({
|
||||
CFM: Name.get("AESV3"),
|
||||
}),
|
||||
});
|
||||
const dict = buildDict(dict3);
|
||||
// 0 chars
|
||||
ensureEncryptDecryptIsIdentity(dict, fileId1, "user", "");
|
||||
// 4 chars
|
||||
ensureEncryptDecryptIsIdentity(dict, fileId1, "user", "aaaa");
|
||||
// 5 chars
|
||||
ensureEncryptDecryptIsIdentity(dict, fileId1, "user", "aaaaa");
|
||||
// 16 chars
|
||||
ensureEncryptDecryptIsIdentity(dict, fileId1, "user", "aaaaaaaaaaaaaaaa");
|
||||
// 22 chars
|
||||
ensureEncryptDecryptIsIdentity(
|
||||
dict,
|
||||
fileId1,
|
||||
"user",
|
||||
"aaaaaaaaaaaaaaaaaaaaaa"
|
||||
);
|
||||
});
|
||||
it("should encrypt and have the correct length using AES128", function () {
|
||||
dict3.CF = buildDict({
|
||||
Identity: buildDict({
|
||||
CFM: Name.get("AESV2"),
|
||||
}),
|
||||
});
|
||||
const dict = buildDict(dict3);
|
||||
// 0 char
|
||||
ensureAESEncryptedStringHasCorrectLength(dict, fileId1, "user", "");
|
||||
// 1 char
|
||||
ensureAESEncryptedStringHasCorrectLength(dict, fileId1, "user", "a");
|
||||
// 2 chars
|
||||
ensureAESEncryptedStringHasCorrectLength(dict, fileId1, "user", "aa");
|
||||
// 16 chars
|
||||
ensureAESEncryptedStringHasCorrectLength(
|
||||
dict,
|
||||
fileId1,
|
||||
"user",
|
||||
"aaaaaaaaaaaaaaaa"
|
||||
);
|
||||
// 19 chars
|
||||
ensureAESEncryptedStringHasCorrectLength(
|
||||
dict,
|
||||
fileId1,
|
||||
"user",
|
||||
"aaaaaaaaaaaaaaaaaaa"
|
||||
);
|
||||
});
|
||||
it("should encrypt and have the correct length using AES256", function () {
|
||||
dict3.CF = buildDict({
|
||||
Identity: buildDict({
|
||||
CFM: Name.get("AESV3"),
|
||||
}),
|
||||
});
|
||||
const dict = buildDict(dict3);
|
||||
// 0 char
|
||||
ensureAESEncryptedStringHasCorrectLength(dict, fileId1, "user", "");
|
||||
// 4 chars
|
||||
ensureAESEncryptedStringHasCorrectLength(dict, fileId1, "user", "aaaa");
|
||||
// 5 chars
|
||||
ensureAESEncryptedStringHasCorrectLength(dict, fileId1, "user", "aaaaa");
|
||||
// 16 chars
|
||||
ensureAESEncryptedStringHasCorrectLength(
|
||||
dict,
|
||||
fileId1,
|
||||
"user",
|
||||
"aaaaaaaaaaaaaaaa"
|
||||
);
|
||||
// 22 chars
|
||||
ensureAESEncryptedStringHasCorrectLength(
|
||||
dict,
|
||||
fileId1,
|
||||
"user",
|
||||
"aaaaaaaaaaaaaaaaaaaaaa"
|
||||
);
|
||||
});
|
||||
});
|
||||
});
209
test/unit/custom_spec.js
Normal file
@@ -0,0 +1,209 @@
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { buildGetDocumentParams } from "./test_utils.js";
|
||||
import { getDocument } from "../../src/display/api.js";
|
||||
|
||||
function getTopLeftPixel(canvasContext) {
|
||||
const imgData = canvasContext.getImageData(0, 0, 1, 1);
|
||||
return {
|
||||
r: imgData.data[0],
|
||||
g: imgData.data[1],
|
||||
b: imgData.data[2],
|
||||
a: imgData.data[3],
|
||||
};
|
||||
}
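// For example (illustrative only): for a context whose canvas is filled with
// opaque white, getTopLeftPixel(ctx) returns { r: 255, g: 255, b: 255, a: 255 }.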
|
||||
|
||||
describe("custom canvas rendering", function () {
|
||||
const transparentGetDocumentParams =
|
||||
buildGetDocumentParams("transparent.pdf");
|
||||
|
||||
let loadingTask, doc, page;
|
||||
|
||||
beforeAll(async function () {
|
||||
loadingTask = getDocument(transparentGetDocumentParams);
|
||||
doc = await loadingTask.promise;
|
||||
page = await doc.getPage(1);
|
||||
});
|
||||
|
||||
afterAll(async function () {
|
||||
doc = null;
|
||||
page = null;
|
||||
await loadingTask.destroy();
|
||||
});
|
||||
|
||||
it("renders to canvas with a default white background", async function () {
|
||||
const viewport = page.getViewport({ scale: 1 });
|
||||
const { canvasFactory } = doc;
|
||||
const canvasAndCtx = canvasFactory.create(viewport.width, viewport.height);
|
||||
|
||||
const renderTask = page.render({
|
||||
canvas: canvasAndCtx.canvas,
|
||||
viewport,
|
||||
});
|
||||
await renderTask.promise;
|
||||
|
||||
expect(getTopLeftPixel(canvasAndCtx.context)).toEqual({
|
||||
r: 255,
|
||||
g: 255,
|
||||
b: 255,
|
||||
a: 255,
|
||||
});
|
||||
canvasFactory.destroy(canvasAndCtx);
|
||||
});
|
||||
|
||||
it("renders to canvas with a custom background", async function () {
|
||||
const viewport = page.getViewport({ scale: 1 });
|
||||
const { canvasFactory } = doc;
|
||||
const canvasAndCtx = canvasFactory.create(viewport.width, viewport.height);
|
||||
|
||||
const renderTask = page.render({
|
||||
canvas: canvasAndCtx.canvas,
|
||||
viewport,
|
||||
background: "rgba(255,0,0,1.0)",
|
||||
});
|
||||
await renderTask.promise;
|
||||
|
||||
expect(getTopLeftPixel(canvasAndCtx.context)).toEqual({
|
||||
r: 255,
|
||||
g: 0,
|
||||
b: 0,
|
||||
a: 255,
|
||||
});
|
||||
canvasFactory.destroy(canvasAndCtx);
|
||||
});
|
||||
});
|
||||
|
||||
describe("custom ownerDocument", function () {
|
||||
const FontFace = globalThis.FontFace;
|
||||
|
||||
const checkFont = font => /g_d\d+_f1/.test(font.family);
|
||||
const checkFontFaceRule = rule =>
|
||||
/^@font-face {font-family:"g_d\d+_f1";src:/.test(rule);
|
||||
|
||||
beforeEach(() => {
|
||||
globalThis.FontFace = function MockFontFace(name) {
|
||||
this.family = name;
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
globalThis.FontFace = FontFace;
|
||||
});
|
||||
|
||||
function getMocks() {
|
||||
const elements = [];
|
||||
const createElement = name => {
|
||||
let element =
|
||||
typeof document !== "undefined" && document.createElement(name);
|
||||
if (name === "style") {
|
||||
element = {
|
||||
tagName: name,
|
||||
sheet: {
|
||||
cssRules: [],
|
||||
insertRule(rule) {
|
||||
this.cssRules.push(rule);
|
||||
},
|
||||
},
|
||||
};
|
||||
Object.assign(element, {
|
||||
remove() {
|
||||
this.remove.called = true;
|
||||
},
|
||||
});
|
||||
}
|
||||
elements.push(element);
|
||||
return element;
|
||||
};
|
||||
const ownerDocument = {
|
||||
fonts: new Set(),
|
||||
createElement,
|
||||
documentElement: {
|
||||
getElementsByTagName: () => [{ append: () => {} }],
|
||||
},
|
||||
};
|
||||
|
||||
return {
|
||||
elements,
|
||||
ownerDocument,
|
||||
};
|
||||
}
|
||||
|
||||
it("should use given document for loading fonts (with Font Loading API)", async function () {
|
||||
const { ownerDocument, elements } = getMocks();
|
||||
const getDocumentParams = buildGetDocumentParams(
|
||||
"TrueType_without_cmap.pdf",
|
||||
{
|
||||
disableFontFace: false,
|
||||
ownerDocument,
|
||||
}
|
||||
);
|
||||
|
||||
const loadingTask = getDocument(getDocumentParams);
|
||||
const doc = await loadingTask.promise;
|
||||
const page = await doc.getPage(1);
|
||||
|
||||
const viewport = page.getViewport({ scale: 1 });
|
||||
const { canvasFactory } = doc;
|
||||
const canvasAndCtx = canvasFactory.create(viewport.width, viewport.height);
|
||||
|
||||
await page.render({
|
||||
canvas: canvasAndCtx.canvas,
|
||||
viewport,
|
||||
}).promise;
|
||||
|
||||
const style = elements.find(element => element.tagName === "style");
|
||||
expect(style).toBeFalsy();
|
||||
expect(ownerDocument.fonts.size).toBeGreaterThanOrEqual(1);
|
||||
expect(Array.from(ownerDocument.fonts).find(checkFont)).toBeTruthy();
|
||||
|
||||
await loadingTask.destroy();
|
||||
canvasFactory.destroy(canvasAndCtx);
|
||||
expect(ownerDocument.fonts.size).toBe(0);
|
||||
});
|
||||
|
||||
it("should use given document for loading fonts (with CSS rules)", async function () {
|
||||
const { ownerDocument, elements } = getMocks();
|
||||
ownerDocument.fonts = null;
|
||||
const getDocumentParams = buildGetDocumentParams(
|
||||
"TrueType_without_cmap.pdf",
|
||||
{
|
||||
disableFontFace: false,
|
||||
ownerDocument,
|
||||
}
|
||||
);
|
||||
|
||||
const loadingTask = getDocument(getDocumentParams);
|
||||
const doc = await loadingTask.promise;
|
||||
const page = await doc.getPage(1);
|
||||
|
||||
const viewport = page.getViewport({ scale: 1 });
|
||||
const { canvasFactory } = doc;
|
||||
const canvasAndCtx = canvasFactory.create(viewport.width, viewport.height);
|
||||
|
||||
await page.render({
|
||||
canvas: canvasAndCtx.canvas,
|
||||
viewport,
|
||||
}).promise;
|
||||
|
||||
const style = elements.find(element => element.tagName === "style");
|
||||
expect(style.sheet.cssRules.length).toBeGreaterThanOrEqual(1);
|
||||
expect(style.sheet.cssRules.find(checkFontFaceRule)).toBeTruthy();
|
||||
|
||||
await loadingTask.destroy();
|
||||
canvasFactory.destroy(canvasAndCtx);
|
||||
expect(style.remove.called).toBe(true);
|
||||
});
|
||||
});
274
test/unit/default_appearance_spec.js
Normal file
@@ -0,0 +1,274 @@
/* Copyright 2020 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
createDefaultAppearance,
|
||||
parseAppearanceStream,
|
||||
parseDefaultAppearance,
|
||||
} from "../../src/core/default_appearance.js";
|
||||
import { Dict, Name } from "../../src/core/primitives.js";
|
||||
import { NullStream, StringStream } from "../../src/core/stream.js";
|
||||
import { GlobalColorSpaceCache } from "../../src/core/image_utils.js";
|
||||
import { XRefMock } from "./test_utils.js";
|
||||
|
||||
describe("Default appearance", function () {
|
||||
describe("parseDefaultAppearance and createDefaultAppearance", function () {
|
||||
it("should parse and create default appearance", function () {
|
||||
const da = "/F1 12 Tf 0.1 0.2 0.3 rg";
|
||||
const result = {
|
||||
fontSize: 12,
|
||||
fontName: "F1",
|
||||
fontColor: new Uint8ClampedArray([26, 51, 76]),
|
||||
};
|
||||
expect(parseDefaultAppearance(da)).toEqual(result);
|
||||
expect(createDefaultAppearance(result)).toEqual(da);
|
||||
|
||||
expect(
|
||||
parseDefaultAppearance(
|
||||
"0.1 0.2 0.3 rg /F1 12 Tf 0.3 0.2 0.1 rg /F2 13 Tf"
|
||||
)
|
||||
).toEqual({
|
||||
fontSize: 13,
|
||||
fontName: "F2",
|
||||
fontColor: new Uint8ClampedArray([76, 51, 26]),
|
||||
});
|
||||
});
|
||||
|
||||
it("should parse default appearance with save/restore", function () {
|
||||
const da = "q Q 0.1 0.2 0.3 rg /F1 12 Tf q 0.3 0.2 0.1 rg /F2 13 Tf Q";
|
||||
expect(parseDefaultAppearance(da)).toEqual({
|
||||
fontSize: 12,
|
||||
fontName: "F1",
|
||||
fontColor: new Uint8ClampedArray([26, 51, 76]),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseAppearanceStream", () => {
|
||||
let evaluatorOptions, xref, globalColorSpaceCache;
|
||||
|
||||
beforeAll(function () {
|
||||
evaluatorOptions = {
|
||||
isEvalSupported: true,
|
||||
isOffscreenCanvasSupported: false,
|
||||
};
|
||||
xref = new XRefMock();
|
||||
globalColorSpaceCache = new GlobalColorSpaceCache();
|
||||
});
|
||||
|
||||
afterAll(function () {
|
||||
evaluatorOptions = null;
|
||||
xref = null;
|
||||
globalColorSpaceCache = null;
|
||||
});
|
||||
|
||||
it("should parse a FreeText (from Acrobat) appearance", () => {
|
||||
const appearance = new StringStream(`
|
||||
0 w
|
||||
46.5 621.0552 156.389 18.969 re
|
||||
n
|
||||
q
|
||||
1 0 0 1 0 0 cm
|
||||
46.5 621.0552 156.389 18.969 re
|
||||
W
|
||||
n
|
||||
0 g
|
||||
1 w
|
||||
BT
|
||||
/Helv 14 Tf
|
||||
0.419998 0.850006 0.160004 rg
|
||||
46.5 626.77 Td
|
||||
(Hello ) Tj
|
||||
35.793 0 Td
|
||||
(World ) Tj
|
||||
40.448 0 Td
|
||||
(from ) Tj
|
||||
31.89 0 Td
|
||||
(Acrobat) Tj
|
||||
ET
|
||||
Q`);
|
||||
const result = {
|
||||
fontSize: 14,
|
||||
fontName: "Helv",
|
||||
fontColor: new Uint8ClampedArray([107, 217, 41]),
|
||||
};
|
||||
expect(
|
||||
parseAppearanceStream(
|
||||
appearance,
|
||||
evaluatorOptions,
|
||||
xref,
|
||||
globalColorSpaceCache
|
||||
)
|
||||
).toEqual(result);
|
||||
expect(appearance.pos).toEqual(0);
|
||||
});
|
||||
|
||||
it("should parse a FreeText (from Firefox) appearance", () => {
|
||||
const appearance = new StringStream(`
|
||||
q
|
||||
0 0 203.7 28.3 re W n
|
||||
BT
|
||||
1 0 0 1 0 34.6 Tm 0 Tc 0.93 0.17 0.44 rg
|
||||
/Helv 18 Tf
|
||||
0 -24.3 Td (Hello World From Firefox) Tj
|
||||
ET
|
||||
Q`);
|
||||
const result = {
|
||||
fontSize: 18,
|
||||
fontName: "Helv",
|
||||
fontColor: new Uint8ClampedArray([237, 43, 112]),
|
||||
};
|
||||
expect(
|
||||
parseAppearanceStream(
|
||||
appearance,
|
||||
evaluatorOptions,
|
||||
xref,
|
||||
globalColorSpaceCache
|
||||
)
|
||||
).toEqual(result);
|
||||
expect(appearance.pos).toEqual(0);
|
||||
});
|
||||
|
||||
it("should parse a FreeText (from Preview) appearance", () => {
|
||||
const indexedDict = new Dict(xref);
|
||||
indexedDict.set("Alternate", Name.get("DeviceRGB"));
|
||||
indexedDict.set("N", 3);
|
||||
indexedDict.set("Length", 0);
|
||||
|
||||
const indexedStream = new NullStream();
|
||||
indexedStream.dict = indexedDict;
|
||||
|
||||
const colorSpaceDict = new Dict(xref);
|
||||
colorSpaceDict.set("Cs1", [Name.get("ICCBased"), indexedStream]);
|
||||
|
||||
const resourcesDict = new Dict(xref);
|
||||
resourcesDict.set("ColorSpace", colorSpaceDict);
|
||||
|
||||
const appearanceDict = new Dict(xref);
|
||||
appearanceDict.set("Resources", resourcesDict);
|
||||
|
||||
const appearance = new StringStream(`
|
||||
q Q q 2.128482 2.128482 247.84 26 re W n /Cs1 cs 0.52799 0.3071 0.99498 sc
|
||||
q 1 0 0 -1 -108.3364 459.8485 cm BT 22.00539 0 0 -22.00539 110.5449 452.72
|
||||
Tm /TT1 1 Tf [ (H) -0.2 (e) -0.2 (l) -0.2 (l) -0.2 (o) -0.2 ( ) 0.2 (W) 17.7
|
||||
(o) -0.2 (rl) -0.2 (d) -0.2 ( ) 0.2 (f) 0.2 (ro) -0.2 (m ) 0.2 (Pre) -0.2
|
||||
(vi) -0.2 (e) -0.2 (w) ] TJ ET Q Q`);
|
||||
appearance.dict = appearanceDict;
|
||||
|
||||
const result = {
|
||||
fontSize: 22.00539,
|
||||
fontName: "TT1",
|
||||
fontColor: new Uint8ClampedArray([135, 78, 254]),
|
||||
};
|
||||
expect(
|
||||
parseAppearanceStream(
|
||||
appearance,
|
||||
evaluatorOptions,
|
||||
xref,
|
||||
globalColorSpaceCache
|
||||
)
|
||||
).toEqual(result);
|
||||
expect(appearance.pos).toEqual(0);
|
||||
});
|
||||
|
||||
it("should parse a FreeText (from Edge) appearance", () => {
|
||||
const appearance = new StringStream(`
|
||||
q
|
||||
0 0 292.5 18.75 re W n
|
||||
BT
|
||||
0 Tc
|
||||
0.0627451 0.486275 0.0627451 rg
|
||||
0 3.8175 Td
|
||||
/Helv 16.5 Tf
|
||||
(Hello World from Edge without Acrobat) Tj
|
||||
ET
|
||||
Q`);
|
||||
const result = {
|
||||
fontSize: 16.5,
|
||||
fontName: "Helv",
|
||||
fontColor: new Uint8ClampedArray([16, 124, 16]),
|
||||
};
|
||||
expect(
|
||||
parseAppearanceStream(
|
||||
appearance,
|
||||
evaluatorOptions,
|
||||
xref,
|
||||
globalColorSpaceCache
|
||||
)
|
||||
).toEqual(result);
|
||||
expect(appearance.pos).toEqual(0);
|
||||
});
|
||||
|
||||
it("should parse a FreeText (from Foxit) appearance", () => {
|
||||
const appearance = new StringStream(`
|
||||
q
|
||||
/Tx BMC
|
||||
0 -22.333 197.667 22.333 re
|
||||
W
|
||||
n
|
||||
BT
|
||||
0.584314 0.247059 0.235294 rg
|
||||
0 -18.1 Td
|
||||
/FXF0 20 Tf
|
||||
(Hello World from Foxit) Tj
|
||||
ET
|
||||
EMC
|
||||
Q`);
|
||||
const result = {
|
||||
fontSize: 20,
|
||||
fontName: "FXF0",
|
||||
fontColor: new Uint8ClampedArray([149, 63, 60]),
|
||||
};
|
||||
expect(
|
||||
parseAppearanceStream(
|
||||
appearance,
|
||||
evaluatorOptions,
|
||||
xref,
|
||||
globalColorSpaceCache
|
||||
)
|
||||
).toEqual(result);
|
||||
expect(appearance.pos).toEqual(0);
|
||||
});
|
||||
|
||||
it("should parse a FreeText (from Okular) appearance", () => {
|
||||
const appearance = new StringStream(`
|
||||
q
|
||||
0.00 0.00 172.65 41.46 re W n
|
||||
0.00000 0.33333 0.49804 rg
|
||||
BT 1 0 0 1 0.00 41.46 Tm
|
||||
/Invalid_font 18.00 Tf
|
||||
0.00 -18.00 Td
|
||||
(Hello World from) Tj
|
||||
/Invalid_font 18.00 Tf
|
||||
0.00 -18.00 Td
|
||||
(Okular) Tj
|
||||
ET Q`);
|
||||
const result = {
|
||||
fontSize: 18,
|
||||
fontName: "Invalid_font",
|
||||
fontColor: new Uint8ClampedArray([0, 85, 127]),
|
||||
};
|
||||
expect(
|
||||
parseAppearanceStream(
|
||||
appearance,
|
||||
evaluatorOptions,
|
||||
xref,
|
||||
globalColorSpaceCache
|
||||
)
|
||||
).toEqual(result);
|
||||
expect(appearance.pos).toEqual(0);
|
||||
});
|
||||
});
|
||||
});
394
test/unit/display_utils_spec.js
Normal file
@@ -0,0 +1,394 @@
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
applyOpacity,
|
||||
findContrastColor,
|
||||
getFilenameFromUrl,
|
||||
getPdfFilenameFromUrl,
|
||||
isValidFetchUrl,
|
||||
PDFDateString,
|
||||
renderRichText,
|
||||
} from "../../src/display/display_utils.js";
|
||||
import { isNodeJS, toBase64Util } from "../../src/shared/util.js";
|
||||
|
||||
describe("display_utils", function () {
|
||||
describe("getFilenameFromUrl", function () {
|
||||
it("should get the filename from an absolute URL", function () {
|
||||
const url = "https://server.org/filename.pdf";
|
||||
expect(getFilenameFromUrl(url)).toEqual("filename.pdf");
|
||||
});
|
||||
|
||||
it("should get the filename from a relative URL", function () {
|
||||
const url = "../../filename.pdf";
|
||||
expect(getFilenameFromUrl(url)).toEqual("filename.pdf");
|
||||
});
|
||||
|
||||
it("should get the filename from a URL with an anchor", function () {
|
||||
const url = "https://server.org/filename.pdf#foo";
|
||||
expect(getFilenameFromUrl(url)).toEqual("filename.pdf");
|
||||
});
|
||||
|
||||
it("should get the filename from a URL with query parameters", function () {
|
||||
const url = "https://server.org/filename.pdf?foo=bar";
|
||||
expect(getFilenameFromUrl(url)).toEqual("filename.pdf");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getPdfFilenameFromUrl", function () {
|
||||
it("gets PDF filename", function () {
|
||||
// Relative URL
|
||||
expect(getPdfFilenameFromUrl("/pdfs/file1.pdf")).toEqual("file1.pdf");
|
||||
// Absolute URL
|
||||
expect(
|
||||
getPdfFilenameFromUrl("http://www.example.com/pdfs/file2.pdf")
|
||||
).toEqual("file2.pdf");
|
||||
});
|
||||
|
||||
it("gets fallback filename", function () {
|
||||
// Relative URL
|
||||
expect(getPdfFilenameFromUrl("/pdfs/file1.txt")).toEqual("document.pdf");
|
||||
// Absolute URL
|
||||
expect(
|
||||
getPdfFilenameFromUrl("http://www.example.com/pdfs/file2.txt")
|
||||
).toEqual("document.pdf");
|
||||
});
|
||||
|
||||
it("gets custom fallback filename", function () {
|
||||
// Relative URL
|
||||
expect(getPdfFilenameFromUrl("/pdfs/file1.txt", "qwerty1.pdf")).toEqual(
|
||||
"qwerty1.pdf"
|
||||
);
|
||||
// Absolute URL
|
||||
expect(
|
||||
getPdfFilenameFromUrl(
|
||||
"http://www.example.com/pdfs/file2.txt",
|
||||
"qwerty2.pdf"
|
||||
)
|
||||
).toEqual("qwerty2.pdf");
|
||||
|
||||
// An empty string should be a valid custom fallback filename.
|
||||
expect(getPdfFilenameFromUrl("/pdfs/file3.txt", "")).toEqual("");
|
||||
});
|
||||
|
||||
it("gets fallback filename when url is not a string", function () {
|
||||
expect(getPdfFilenameFromUrl(null)).toEqual("document.pdf");
|
||||
|
||||
expect(getPdfFilenameFromUrl(null, "file.pdf")).toEqual("file.pdf");
|
||||
});
|
||||
|
||||
it("gets PDF filename from URL containing leading/trailing whitespace", function () {
|
||||
// Relative URL
|
||||
expect(getPdfFilenameFromUrl(" /pdfs/file1.pdf ")).toEqual(
|
||||
"file1.pdf"
|
||||
);
|
||||
// Absolute URL
|
||||
expect(
|
||||
getPdfFilenameFromUrl(" http://www.example.com/pdfs/file2.pdf ")
|
||||
).toEqual("file2.pdf");
|
||||
});
|
||||
|
||||
it("gets PDF filename from query string", function () {
|
||||
// Relative URL
|
||||
expect(getPdfFilenameFromUrl("/pdfs/pdfs.html?name=file1.pdf")).toEqual(
|
||||
"file1.pdf"
|
||||
);
|
||||
// Absolute URL
|
||||
expect(
|
||||
getPdfFilenameFromUrl("http://www.example.com/pdfs/pdf.html?file2.pdf")
|
||||
).toEqual("file2.pdf");
|
||||
});
|
||||
|
||||
it("gets PDF filename from hash string", function () {
|
||||
// Relative URL
|
||||
expect(getPdfFilenameFromUrl("/pdfs/pdfs.html#name=file1.pdf")).toEqual(
|
||||
"file1.pdf"
|
||||
);
|
||||
// Absolute URL
|
||||
expect(
|
||||
getPdfFilenameFromUrl("http://www.example.com/pdfs/pdf.html#file2.pdf")
|
||||
).toEqual("file2.pdf");
|
||||
});
|
||||
|
||||
it("gets correct PDF filename when multiple ones are present", function () {
|
||||
// Relative URL
|
||||
expect(getPdfFilenameFromUrl("/pdfs/file1.pdf?name=file.pdf")).toEqual(
|
||||
"file1.pdf"
|
||||
);
|
||||
// Absolute URL
|
||||
expect(
|
||||
getPdfFilenameFromUrl("http://www.example.com/pdfs/file2.pdf#file.pdf")
|
||||
).toEqual("file2.pdf");
|
||||
});
|
||||
|
||||
it("gets PDF filename from URI-encoded data", function () {
|
||||
const encodedUrl = encodeURIComponent(
|
||||
"http://www.example.com/pdfs/file1.pdf"
|
||||
);
|
||||
expect(getPdfFilenameFromUrl(encodedUrl)).toEqual("file1.pdf");
|
||||
|
||||
const encodedUrlWithQuery = encodeURIComponent(
|
||||
"http://www.example.com/pdfs/file.txt?file2.pdf"
|
||||
);
|
||||
expect(getPdfFilenameFromUrl(encodedUrlWithQuery)).toEqual("file2.pdf");
|
||||
});
|
||||
|
||||
it("gets PDF filename from data mistaken for URI-encoded", function () {
|
||||
expect(getPdfFilenameFromUrl("/pdfs/%AA.pdf")).toEqual("%AA.pdf");
|
||||
|
||||
expect(getPdfFilenameFromUrl("/pdfs/%2F.pdf")).toEqual("%2F.pdf");
|
||||
});
|
||||
|
||||
it("gets PDF filename from (some) standard protocols", function () {
|
||||
// HTTP
|
||||
expect(getPdfFilenameFromUrl("http://www.example.com/file1.pdf")).toEqual(
|
||||
"file1.pdf"
|
||||
);
|
||||
// HTTPS
|
||||
expect(
|
||||
getPdfFilenameFromUrl("https://www.example.com/file2.pdf")
|
||||
).toEqual("file2.pdf");
|
||||
// File
|
||||
expect(getPdfFilenameFromUrl("file:///path/to/files/file3.pdf")).toEqual(
|
||||
"file3.pdf"
|
||||
);
|
||||
// FTP
|
||||
expect(getPdfFilenameFromUrl("ftp://www.example.com/file4.pdf")).toEqual(
|
||||
"file4.pdf"
|
||||
);
|
||||
});
|
||||
|
||||
it('gets PDF filename from query string appended to "blob:" URL', function () {
|
||||
const typedArray = new Uint8Array([1, 2, 3, 4, 5]);
|
||||
const blobUrl = URL.createObjectURL(
|
||||
new Blob([typedArray], { type: "application/pdf" })
|
||||
);
|
||||
// Sanity check to ensure that a "blob:" URL was returned.
|
||||
expect(blobUrl.startsWith("blob:")).toEqual(true);
|
||||
|
||||
expect(getPdfFilenameFromUrl(blobUrl + "?file.pdf")).toEqual("file.pdf");
|
||||
});
|
||||
|
||||
it('gets fallback filename from query string appended to "data:" URL', function () {
|
||||
const typedArray = new Uint8Array([1, 2, 3, 4, 5]);
|
||||
const dataUrl = `data:application/pdf;base64,${toBase64Util(typedArray)}`;
|
||||
// Sanity check to ensure that a "data:" URL was returned.
|
||||
expect(dataUrl.startsWith("data:")).toEqual(true);
|
||||
|
||||
expect(getPdfFilenameFromUrl(dataUrl + "?file1.pdf")).toEqual(
|
||||
"document.pdf"
|
||||
);
|
||||
|
||||
// Should correctly detect a "data:" URL with leading whitespace.
|
||||
expect(getPdfFilenameFromUrl(" " + dataUrl + "?file2.pdf")).toEqual(
|
||||
"document.pdf"
|
||||
);
|
||||
});
|
||||
|
||||
it("gets PDF filename with a hash sign", function () {
|
||||
expect(getPdfFilenameFromUrl("/foo.html?file=foo%23.pdf")).toEqual(
|
||||
"foo#.pdf"
|
||||
);
|
||||
|
||||
expect(getPdfFilenameFromUrl("/foo.html?file=%23.pdf")).toEqual("#.pdf");
|
||||
|
||||
expect(getPdfFilenameFromUrl("/foo.html?foo%23.pdf")).toEqual("foo#.pdf");
|
||||
|
||||
expect(getPdfFilenameFromUrl("/foo%23.pdf?a=b#c")).toEqual("foo#.pdf");
|
||||
|
||||
expect(getPdfFilenameFromUrl("foo.html#%23.pdf")).toEqual("#.pdf");
|
||||
});
|
||||
});
|
||||
|
||||
describe("isValidFetchUrl", function () {
|
||||
it("handles invalid Fetch URLs", function () {
|
||||
expect(isValidFetchUrl(null)).toEqual(false);
|
||||
expect(isValidFetchUrl(100)).toEqual(false);
|
||||
expect(isValidFetchUrl("foo")).toEqual(false);
|
||||
expect(isValidFetchUrl("/foo", 100)).toEqual(false);
|
||||
});
|
||||
|
||||
it("handles relative Fetch URLs", function () {
|
||||
expect(isValidFetchUrl("/foo", "file://www.example.com")).toEqual(false);
|
||||
expect(isValidFetchUrl("/foo", "http://www.example.com")).toEqual(true);
|
||||
});
|
||||
|
||||
it("handles unsupported Fetch protocols", function () {
|
||||
expect(isValidFetchUrl("file://www.example.com")).toEqual(false);
|
||||
expect(isValidFetchUrl("ftp://www.example.com")).toEqual(false);
|
||||
});
|
||||
|
||||
it("handles supported Fetch protocols", function () {
|
||||
expect(isValidFetchUrl("http://www.example.com")).toEqual(true);
|
||||
expect(isValidFetchUrl("https://www.example.com")).toEqual(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("PDFDateString", function () {
|
||||
describe("toDateObject", function () {
|
||||
it("converts PDF date strings to JavaScript `Date` objects", function () {
|
||||
const expectations = {
|
||||
undefined: null,
|
||||
null: null,
|
||||
42: null,
|
||||
2019: null,
|
||||
D2019: null,
|
||||
"D:": null,
|
||||
"D:201": null,
|
||||
"D:2019": new Date(Date.UTC(2019, 0, 1, 0, 0, 0)),
|
||||
"D:20190": new Date(Date.UTC(2019, 0, 1, 0, 0, 0)),
|
||||
"D:201900": new Date(Date.UTC(2019, 0, 1, 0, 0, 0)),
|
||||
"D:201913": new Date(Date.UTC(2019, 0, 1, 0, 0, 0)),
|
||||
"D:201902": new Date(Date.UTC(2019, 1, 1, 0, 0, 0)),
|
||||
"D:2019020": new Date(Date.UTC(2019, 1, 1, 0, 0, 0)),
|
||||
"D:20190200": new Date(Date.UTC(2019, 1, 1, 0, 0, 0)),
|
||||
"D:20190232": new Date(Date.UTC(2019, 1, 1, 0, 0, 0)),
|
||||
"D:20190203": new Date(Date.UTC(2019, 1, 3, 0, 0, 0)),
|
||||
// Invalid dates like the 31st of April are handled by JavaScript:
|
||||
"D:20190431": new Date(Date.UTC(2019, 4, 1, 0, 0, 0)),
|
||||
"D:201902030": new Date(Date.UTC(2019, 1, 3, 0, 0, 0)),
|
||||
"D:2019020300": new Date(Date.UTC(2019, 1, 3, 0, 0, 0)),
|
||||
"D:2019020324": new Date(Date.UTC(2019, 1, 3, 0, 0, 0)),
|
||||
"D:2019020304": new Date(Date.UTC(2019, 1, 3, 4, 0, 0)),
|
||||
"D:20190203040": new Date(Date.UTC(2019, 1, 3, 4, 0, 0)),
|
||||
"D:201902030400": new Date(Date.UTC(2019, 1, 3, 4, 0, 0)),
|
||||
"D:201902030460": new Date(Date.UTC(2019, 1, 3, 4, 0, 0)),
|
||||
"D:201902030405": new Date(Date.UTC(2019, 1, 3, 4, 5, 0)),
|
||||
"D:2019020304050": new Date(Date.UTC(2019, 1, 3, 4, 5, 0)),
|
||||
"D:20190203040500": new Date(Date.UTC(2019, 1, 3, 4, 5, 0)),
|
||||
"D:20190203040560": new Date(Date.UTC(2019, 1, 3, 4, 5, 0)),
|
||||
"D:20190203040506": new Date(Date.UTC(2019, 1, 3, 4, 5, 6)),
|
||||
"D:20190203040506F": new Date(Date.UTC(2019, 1, 3, 4, 5, 6)),
|
||||
"D:20190203040506Z": new Date(Date.UTC(2019, 1, 3, 4, 5, 6)),
|
||||
"D:20190203040506-": new Date(Date.UTC(2019, 1, 3, 4, 5, 6)),
|
||||
"D:20190203040506+": new Date(Date.UTC(2019, 1, 3, 4, 5, 6)),
|
||||
"D:20190203040506+'": new Date(Date.UTC(2019, 1, 3, 4, 5, 6)),
|
||||
"D:20190203040506+0": new Date(Date.UTC(2019, 1, 3, 4, 5, 6)),
|
||||
"D:20190203040506+01": new Date(Date.UTC(2019, 1, 3, 3, 5, 6)),
|
||||
"D:20190203040506+00'": new Date(Date.UTC(2019, 1, 3, 4, 5, 6)),
|
||||
"D:20190203040506+24'": new Date(Date.UTC(2019, 1, 3, 4, 5, 6)),
|
||||
"D:20190203040506+01'": new Date(Date.UTC(2019, 1, 3, 3, 5, 6)),
|
||||
"D:20190203040506+01'0": new Date(Date.UTC(2019, 1, 3, 3, 5, 6)),
|
||||
"D:20190203040506+01'00": new Date(Date.UTC(2019, 1, 3, 3, 5, 6)),
|
||||
"D:20190203040506+01'60": new Date(Date.UTC(2019, 1, 3, 3, 5, 6)),
|
||||
"D:20190203040506+0102": new Date(Date.UTC(2019, 1, 3, 3, 3, 6)),
|
||||
"D:20190203040506+01'02": new Date(Date.UTC(2019, 1, 3, 3, 3, 6)),
|
||||
"D:20190203040506+01'02'": new Date(Date.UTC(2019, 1, 3, 3, 3, 6)),
|
||||
// Offset hour and minute that result in a day change:
|
||||
"D:20190203040506+05'07": new Date(Date.UTC(2019, 1, 2, 22, 58, 6)),
|
||||
};
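// The strings above follow the PDF date format D:YYYYMMDDHHmmSSOHH'mm'
// from the PDF specification, where every field after the year is optional
// (hence the many truncated variants being accepted).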
|
||||
|
||||
for (const [input, expectation] of Object.entries(expectations)) {
|
||||
const result = PDFDateString.toDateObject(input);
|
||||
if (result) {
|
||||
expect(result.getTime()).toEqual(expectation.getTime());
|
||||
} else {
|
||||
expect(result).toEqual(expectation);
|
||||
}
|
||||
}
|
||||
const now = new Date();
|
||||
expect(PDFDateString.toDateObject(now)).toEqual(now);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("findContrastColor", function () {
|
||||
it("Check that the lightness is changed correctly", function () {
|
||||
expect(findContrastColor([210, 98, 76], [197, 113, 89])).toEqual(
|
||||
"#260e09"
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("applyOpacity", function () {
|
||||
it("Check that the opacity is applied correctly", function () {
|
||||
if (isNodeJS) {
|
||||
pending("OffscreenCanvas is not supported in Node.js.");
|
||||
}
|
||||
const canvas = new OffscreenCanvas(1, 1);
|
||||
const ctx = canvas.getContext("2d");
|
||||
ctx.fillStyle = "white";
|
||||
ctx.fillRect(0, 0, 1, 1);
|
||||
ctx.fillStyle = "rgb(123, 45, 67)";
|
||||
ctx.globalAlpha = 0.8;
|
||||
ctx.fillRect(0, 0, 1, 1);
|
||||
const [r, g, b] = ctx.getImageData(0, 0, 1, 1).data;
|
||||
expect(applyOpacity(123, 45, 67, ctx.globalAlpha)).toEqual([r, g, b]);
|
||||
});
|
||||
});
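// Illustrative note (not part of the test suite): the expectation above relies
// on plain source-over compositing of a partially transparent fill over an
// opaque white backdrop, i.e. out = alpha * src + (1 - alpha) * 255 per
// channel. A sketch of that arithmetic, assuming this blending model (the
// exact rounding is up to the canvas implementation, which is why the test
// reads the pixel back instead of hardcoding the bytes):
const blendOverWhite = (channel, alpha) => alpha * channel + (1 - alpha) * 255;
// blendOverWhite(123, 0.8) ~ 149.4, blendOverWhite(45, 0.8) ~ 87 and
// blendOverWhite(67, 0.8) ~ 104.6, which quantize to roughly [149, 87, 105].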
|
||||
|
||||
describe("renderRichText", function () {
|
||||
// Unlike other tests we cannot simply compare the HTML-strings since
|
||||
// Chrome and Firefox produce different results. Instead we compare sets
|
||||
// containing the individual parts of the HTML-strings.
|
||||
const splitParts = s => new Set(s.split(/[<>/ ]+/).filter(x => x));
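// For example, splitParts('<p dir="ltr">Hi</p>') yields the set
// { 'p', 'dir="ltr"', 'Hi' }: the markup is cut on <, >, / and spaces and
// duplicates collapse, so formatting differences between browser serializers
// no longer affect the comparison.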
|
||||
|
||||
it("should render plain text", function () {
|
||||
if (isNodeJS) {
|
||||
pending("DOM is not supported in Node.js.");
|
||||
}
|
||||
const container = document.createElement("div");
|
||||
renderRichText(
|
||||
{
|
||||
html: "Hello world!\nThis is a test.",
|
||||
dir: "ltr",
|
||||
className: "foo",
|
||||
},
|
||||
container
|
||||
);
|
||||
expect(splitParts(container.innerHTML)).toEqual(
|
||||
splitParts(
|
||||
'<p dir="ltr" class="richText foo">Hello world!<br>This is a test.</p>'
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
it("should render XFA rich text", function () {
|
||||
if (isNodeJS) {
|
||||
pending("DOM is not supported in Node.js.");
|
||||
}
|
||||
const container = document.createElement("div");
|
||||
const xfaHtml = {
|
||||
name: "div",
|
||||
attributes: { style: { color: "red" } },
|
||||
children: [
|
||||
{
|
||||
name: "p",
|
||||
attributes: { style: { fontSize: "20px" } },
|
||||
children: [
|
||||
{
|
||||
name: "span",
|
||||
attributes: { style: { fontWeight: "bold" } },
|
||||
value: "Hello",
|
||||
},
|
||||
{ name: "#text", value: " world!" },
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
renderRichText(
|
||||
{ html: xfaHtml, dir: "ltr", className: "foo" },
|
||||
container
|
||||
);
|
||||
expect(splitParts(container.innerHTML)).toEqual(
|
||||
splitParts(
|
||||
'<div style="color: red;" class="richText foo">' +
|
||||
'<p style="font-size: 20px;">' +
|
||||
'<span style="font-weight: bold;">Hello</span> world!</p></div>'
|
||||
)
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
320
test/unit/document_spec.js
Normal file
@@ -0,0 +1,320 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { createIdFactory, XRefMock } from "./test_utils.js";
|
||||
import { Dict, Name, Ref } from "../../src/core/primitives.js";
|
||||
import { PDFDocument } from "../../src/core/document.js";
|
||||
import { StringStream } from "../../src/core/stream.js";
|
||||
|
||||
describe("document", function () {
|
||||
describe("Page", function () {
|
||||
it("should create correct objId/fontId using the idFactory", function () {
|
||||
const idFactory1 = createIdFactory(/* pageIndex = */ 0);
|
||||
const idFactory2 = createIdFactory(/* pageIndex = */ 1);
|
||||
|
||||
expect(idFactory1.createObjId()).toEqual("p0_1");
|
||||
expect(idFactory1.createObjId()).toEqual("p0_2");
|
||||
expect(idFactory1.createFontId()).toEqual("f1");
|
||||
expect(idFactory1.createFontId()).toEqual("f2");
|
||||
expect(idFactory1.getDocId()).toEqual("g_d0");
|
||||
|
||||
expect(idFactory2.createObjId()).toEqual("p1_1");
|
||||
expect(idFactory2.createObjId()).toEqual("p1_2");
|
||||
expect(idFactory2.createFontId()).toEqual("f1");
|
||||
expect(idFactory2.createFontId()).toEqual("f2");
|
||||
expect(idFactory2.getDocId()).toEqual("g_d0");
|
||||
|
||||
expect(idFactory1.createObjId()).toEqual("p0_3");
|
||||
expect(idFactory1.createObjId()).toEqual("p0_4");
|
||||
expect(idFactory1.createFontId()).toEqual("f3");
|
||||
expect(idFactory1.createFontId()).toEqual("f4");
|
||||
expect(idFactory1.getDocId()).toEqual("g_d0");
|
||||
});
|
||||
});
|
||||
|
||||
describe("PDFDocument", function () {
|
||||
const stream = new StringStream("Dummy_PDF_data");
|
||||
|
||||
function getDocument(acroForm, xref = new XRefMock()) {
|
||||
const catalog = { acroForm };
|
||||
const pdfManager = {
|
||||
get docId() {
|
||||
return "d0";
|
||||
},
|
||||
ensureDoc(prop, args) {
|
||||
return pdfManager.ensure(pdfDocument, prop, args);
|
||||
},
|
||||
ensureCatalog(prop, args) {
|
||||
return pdfManager.ensure(catalog, prop, args);
|
||||
},
|
||||
async ensure(obj, prop, args) {
|
||||
const value = obj[prop];
|
||||
if (typeof value === "function") {
|
||||
return value.apply(obj, args);
|
||||
}
|
||||
return value;
|
||||
},
|
||||
get evaluatorOptions() {
|
||||
return { isOffscreenCanvasSupported: false };
|
||||
},
|
||||
};
|
||||
const pdfDocument = new PDFDocument(pdfManager, stream);
|
||||
pdfDocument.xref = xref;
|
||||
pdfDocument.catalog = catalog;
|
||||
return pdfDocument;
|
||||
}
|
||||
|
||||
it("should get form info when no form data is present", function () {
|
||||
const pdfDocument = getDocument(null);
|
||||
expect(pdfDocument.formInfo).toEqual({
|
||||
hasAcroForm: false,
|
||||
hasSignatures: false,
|
||||
hasXfa: false,
|
||||
hasFields: false,
|
||||
});
|
||||
});
|
||||
|
||||
it("should get form info when XFA is present", function () {
|
||||
const acroForm = new Dict();
|
||||
|
||||
// The `XFA` entry can only be a non-empty array or stream.
|
||||
acroForm.set("XFA", []);
|
||||
let pdfDocument = getDocument(acroForm);
|
||||
expect(pdfDocument.formInfo).toEqual({
|
||||
hasAcroForm: false,
|
||||
hasSignatures: false,
|
||||
hasXfa: false,
|
||||
hasFields: false,
|
||||
});
|
||||
|
||||
acroForm.set("XFA", ["foo", "bar"]);
|
||||
pdfDocument = getDocument(acroForm);
|
||||
expect(pdfDocument.formInfo).toEqual({
|
||||
hasAcroForm: false,
|
||||
hasSignatures: false,
|
||||
hasXfa: true,
|
||||
hasFields: false,
|
||||
});
|
||||
|
||||
acroForm.set("XFA", new StringStream(""));
|
||||
pdfDocument = getDocument(acroForm);
|
||||
expect(pdfDocument.formInfo).toEqual({
|
||||
hasAcroForm: false,
|
||||
hasSignatures: false,
|
||||
hasXfa: false,
|
||||
hasFields: false,
|
||||
});
|
||||
|
||||
acroForm.set("XFA", new StringStream("non-empty"));
|
||||
pdfDocument = getDocument(acroForm);
|
||||
expect(pdfDocument.formInfo).toEqual({
|
||||
hasAcroForm: false,
|
||||
hasSignatures: false,
|
||||
hasXfa: true,
|
||||
hasFields: false,
|
||||
});
|
||||
});
|
||||
|
||||
it("should get form info when AcroForm is present", function () {
|
||||
const acroForm = new Dict();
|
||||
|
||||
// The `Fields` entry can only be a non-empty array.
|
||||
acroForm.set("Fields", []);
|
||||
let pdfDocument = getDocument(acroForm);
|
||||
expect(pdfDocument.formInfo).toEqual({
|
||||
hasAcroForm: false,
|
||||
hasSignatures: false,
|
||||
hasXfa: false,
|
||||
hasFields: false,
|
||||
});
|
||||
|
||||
acroForm.set("Fields", ["foo", "bar"]);
|
||||
pdfDocument = getDocument(acroForm);
|
||||
expect(pdfDocument.formInfo).toEqual({
|
||||
hasAcroForm: true,
|
||||
hasSignatures: false,
|
||||
hasXfa: false,
|
||||
hasFields: true,
|
||||
});
|
||||
|
||||
// If the first bit of the `SigFlags` entry is set and the `Fields` array
|
||||
// only contains document signatures, then there is no AcroForm data.
|
||||
acroForm.set("Fields", ["foo", "bar"]);
|
||||
acroForm.set("SigFlags", 2);
|
||||
pdfDocument = getDocument(acroForm);
|
||||
expect(pdfDocument.formInfo).toEqual({
|
||||
hasAcroForm: true,
|
||||
hasSignatures: false,
|
||||
hasXfa: false,
|
||||
hasFields: true,
|
||||
});
|
||||
|
||||
const annotationDict = new Dict();
|
||||
annotationDict.set("FT", Name.get("Sig"));
|
||||
annotationDict.set("Rect", [0, 0, 0, 0]);
|
||||
const annotationRef = Ref.get(11, 0);
|
||||
|
||||
const kidsDict = new Dict();
|
||||
kidsDict.set("Kids", [annotationRef]);
|
||||
const kidsRef = Ref.get(10, 0);
|
||||
|
||||
const xref = new XRefMock([
|
||||
{ ref: annotationRef, data: annotationDict },
|
||||
{ ref: kidsRef, data: kidsDict },
|
||||
]);
|
||||
|
||||
acroForm.set("Fields", [kidsRef]);
|
||||
acroForm.set("SigFlags", 3);
|
||||
pdfDocument = getDocument(acroForm, xref);
|
||||
expect(pdfDocument.formInfo).toEqual({
|
||||
hasAcroForm: false,
|
||||
hasSignatures: true,
|
||||
hasXfa: false,
|
||||
hasFields: true,
|
||||
});
|
||||
});
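// Note on the `SigFlags` values used above (AcroForm dictionary, PDF 32000-1):
// bit 1 (value 1) is SignaturesExist and bit 2 (value 2) is AppendOnly. Hence
// SigFlags = 2 on its own does not mark the document as signed, while
// SigFlags = 3 together with signature-only fields gives hasSignatures = true
// and hasAcroForm = false in the expectations above.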
|
||||
|
||||
it("should get calculation order array or null", function () {
|
||||
const acroForm = new Dict();
|
||||
|
||||
let pdfDocument = getDocument(acroForm);
|
||||
expect(pdfDocument.calculationOrderIds).toEqual(null);
|
||||
|
||||
acroForm.set("CO", [Ref.get(1, 0), Ref.get(2, 0), Ref.get(3, 0)]);
|
||||
pdfDocument = getDocument(acroForm);
|
||||
expect(pdfDocument.calculationOrderIds).toEqual(["1R", "2R", "3R"]);
|
||||
|
||||
acroForm.set("CO", []);
|
||||
pdfDocument = getDocument(acroForm);
|
||||
expect(pdfDocument.calculationOrderIds).toEqual(null);
|
||||
|
||||
acroForm.set("CO", ["1", "2"]);
|
||||
pdfDocument = getDocument(acroForm);
|
||||
expect(pdfDocument.calculationOrderIds).toEqual(null);
|
||||
|
||||
acroForm.set("CO", ["1", Ref.get(1, 0), "2"]);
|
||||
pdfDocument = getDocument(acroForm);
|
||||
expect(pdfDocument.calculationOrderIds).toEqual(["1R"]);
|
||||
});
|
||||
|
||||
it("should get field objects array or null", async function () {
|
||||
const acroForm = new Dict();
|
||||
|
||||
let pdfDocument = getDocument(acroForm);
|
||||
let fields = await pdfDocument.fieldObjects;
|
||||
expect(fields).toEqual(null);
|
||||
|
||||
acroForm.set("Fields", []);
|
||||
pdfDocument = getDocument(acroForm);
|
||||
fields = await pdfDocument.fieldObjects;
|
||||
expect(fields).toEqual(null);
|
||||
|
||||
const kid1Ref = Ref.get(314, 0);
|
||||
const kid11Ref = Ref.get(159, 0);
|
||||
const kid2Ref = Ref.get(265, 0);
|
||||
const kid2BisRef = Ref.get(266, 0);
|
||||
const parentRef = Ref.get(358, 0);
|
||||
|
||||
const allFields = Object.create(null);
|
||||
for (const name of ["parent", "kid1", "kid2", "kid11"]) {
|
||||
const buttonWidgetDict = new Dict();
|
||||
buttonWidgetDict.set("Type", Name.get("Annot"));
|
||||
buttonWidgetDict.set("Subtype", Name.get("Widget"));
|
||||
buttonWidgetDict.set("FT", Name.get("Btn"));
|
||||
buttonWidgetDict.set("T", name);
|
||||
allFields[name] = buttonWidgetDict;
|
||||
}
|
||||
|
||||
allFields.kid1.set("Kids", [kid11Ref]);
|
||||
allFields.parent.set("Kids", [kid1Ref, kid2Ref, kid2BisRef]);
|
||||
|
||||
const xref = new XRefMock([
|
||||
{ ref: parentRef, data: allFields.parent },
|
||||
{ ref: kid1Ref, data: allFields.kid1 },
|
||||
{ ref: kid11Ref, data: allFields.kid11 },
|
||||
{ ref: kid2Ref, data: allFields.kid2 },
|
||||
{ ref: kid2BisRef, data: allFields.kid2 },
|
||||
]);
|
||||
|
||||
acroForm.set("Fields", [parentRef]);
|
||||
pdfDocument = getDocument(acroForm, xref);
|
||||
fields = (await pdfDocument.fieldObjects).allFields;
|
||||
|
||||
for (const [name, objs] of Object.entries(fields)) {
|
||||
fields[name] = objs.map(obj => obj.id);
|
||||
}
|
||||
|
||||
expect(fields["parent.kid1"]).toEqual(["314R"]);
|
||||
expect(fields["parent.kid1.kid11"]).toEqual(["159R"]);
|
||||
expect(fields["parent.kid2"]).toEqual(["265R", "266R"]);
|
||||
expect(fields.parent).toEqual(["358R"]);
|
||||
});
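// Note: the fully qualified names checked above are built by joining the `T`
// entries along the `Kids` chain with a period, which is why kid11 appears as
// "parent.kid1.kid11". kid2Ref and kid2BisRef both resolve to a widget named
// "kid2", so their object ids ("265R" and "266R") end up grouped under the
// same "parent.kid2" key.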
|
||||
|
||||
it("should check if fields have any actions", async function () {
|
||||
const acroForm = new Dict();
|
||||
|
||||
let pdfDocument = getDocument(acroForm);
|
||||
let hasJSActions = await pdfDocument.hasJSActions;
|
||||
expect(hasJSActions).toEqual(false);
|
||||
|
||||
acroForm.set("Fields", []);
|
||||
pdfDocument = getDocument(acroForm);
|
||||
hasJSActions = await pdfDocument.hasJSActions;
|
||||
expect(hasJSActions).toEqual(false);
|
||||
|
||||
const kid1Ref = Ref.get(314, 0);
|
||||
const kid11Ref = Ref.get(159, 0);
|
||||
const kid2Ref = Ref.get(265, 0);
|
||||
const parentRef = Ref.get(358, 0);
|
||||
|
||||
const allFields = Object.create(null);
|
||||
for (const name of ["parent", "kid1", "kid2", "kid11"]) {
|
||||
const buttonWidgetDict = new Dict();
|
||||
buttonWidgetDict.set("Type", Name.get("Annot"));
|
||||
buttonWidgetDict.set("Subtype", Name.get("Widget"));
|
||||
buttonWidgetDict.set("FT", Name.get("Btn"));
|
||||
buttonWidgetDict.set("T", name);
|
||||
allFields[name] = buttonWidgetDict;
|
||||
}
|
||||
|
||||
allFields.kid1.set("Kids", [kid11Ref]);
|
||||
allFields.parent.set("Kids", [kid1Ref, kid2Ref]);
|
||||
|
||||
const xref = new XRefMock([
|
||||
{ ref: parentRef, data: allFields.parent },
|
||||
{ ref: kid1Ref, data: allFields.kid1 },
|
||||
{ ref: kid11Ref, data: allFields.kid11 },
|
||||
{ ref: kid2Ref, data: allFields.kid2 },
|
||||
]);
|
||||
|
||||
acroForm.set("Fields", [parentRef]);
|
||||
pdfDocument = getDocument(acroForm, xref);
|
||||
hasJSActions = await pdfDocument.hasJSActions;
|
||||
expect(hasJSActions).toEqual(false);
|
||||
|
||||
const JS = Name.get("JavaScript");
|
||||
const additionalActionsDict = new Dict();
|
||||
const eDict = new Dict();
|
||||
eDict.set("JS", "hello()");
|
||||
eDict.set("S", JS);
|
||||
additionalActionsDict.set("E", eDict);
|
||||
allFields.kid2.set("AA", additionalActionsDict);
|
||||
|
||||
pdfDocument = getDocument(acroForm, xref);
|
||||
hasJSActions = await pdfDocument.hasJSActions;
|
||||
expect(hasJSActions).toEqual(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
141
test/unit/editor_spec.js
Normal file
@@ -0,0 +1,141 @@
|
||||
/* Copyright 2022 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { CommandManager } from "../../src/display/editor/tools.js";
|
||||
import { SignatureExtractor } from "../../src/display/editor/drawers/signaturedraw.js";
|
||||
|
||||
describe("editor", function () {
|
||||
describe("Command Manager", function () {
|
||||
it("should check undo/redo", function () {
|
||||
const manager = new CommandManager(4);
|
||||
let x = 0;
|
||||
const makeDoUndo = n => ({ cmd: () => (x += n), undo: () => (x -= n) });
|
||||
|
||||
manager.add({ ...makeDoUndo(1), mustExec: true });
|
||||
expect(x).toEqual(1);
|
||||
|
||||
manager.add({ ...makeDoUndo(2), mustExec: true });
|
||||
expect(x).toEqual(3);
|
||||
|
||||
manager.add({ ...makeDoUndo(3), mustExec: true });
|
||||
expect(x).toEqual(6);
|
||||
|
||||
manager.undo();
|
||||
expect(x).toEqual(3);
|
||||
|
||||
manager.undo();
|
||||
expect(x).toEqual(1);
|
||||
|
||||
manager.undo();
|
||||
expect(x).toEqual(0);
|
||||
|
||||
manager.undo();
|
||||
expect(x).toEqual(0);
|
||||
|
||||
manager.redo();
|
||||
expect(x).toEqual(1);
|
||||
|
||||
manager.redo();
|
||||
expect(x).toEqual(3);
|
||||
|
||||
manager.redo();
|
||||
expect(x).toEqual(6);
|
||||
|
||||
manager.redo();
|
||||
expect(x).toEqual(6);
|
||||
|
||||
manager.undo();
|
||||
expect(x).toEqual(3);
|
||||
|
||||
manager.redo();
|
||||
expect(x).toEqual(6);
|
||||
});
|
||||
});
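// Minimal sketch (not the actual implementation in
// src/display/editor/tools.js) of the bounded undo/redo behaviour the tests in
// this file exercise, assuming a plain array-backed history with a maximum
// size; the class and field names here are purely illustrative:
class TinyCommandManager {
  #commands = [];
  #position = -1;
  constructor(maxSize = 128) {
    this.maxSize = maxSize;
  }
  add({ cmd, undo, mustExec }) {
    if (mustExec) {
      cmd();
    }
    // Drop any redo tail, append the new command and evict the oldest entry
    // once the buffer exceeds its maximum size.
    this.#commands.length = this.#position + 1;
    this.#commands.push({ cmd, undo });
    if (this.#commands.length > this.maxSize) {
      this.#commands.shift();
    }
    this.#position = this.#commands.length - 1;
  }
  undo() {
    if (this.#position >= 0) {
      this.#commands[this.#position--].undo();
    }
  }
  redo() {
    if (this.#position < this.#commands.length - 1) {
      this.#commands[++this.#position].cmd();
    }
  }
}
// With maxSize = 3 this sketch reproduces the eviction behaviour checked in
// the "should hit the limit of the manager" test below.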
|
||||
|
||||
it("should hit the limit of the manager", function () {
|
||||
const manager = new CommandManager(3);
|
||||
let x = 0;
|
||||
const makeDoUndo = n => ({ cmd: () => (x += n), undo: () => (x -= n) });
|
||||
|
||||
manager.add({ ...makeDoUndo(1), mustExec: true }); // 1
|
||||
manager.add({ ...makeDoUndo(2), mustExec: true }); // 3
|
||||
manager.add({ ...makeDoUndo(3), mustExec: true }); // 6
|
||||
manager.add({ ...makeDoUndo(4), mustExec: true }); // 10
|
||||
expect(x).toEqual(10);
|
||||
|
||||
manager.undo();
|
||||
manager.undo();
|
||||
expect(x).toEqual(3);
|
||||
|
||||
manager.undo();
|
||||
expect(x).toEqual(1);
|
||||
|
||||
manager.undo();
|
||||
expect(x).toEqual(1);
|
||||
|
||||
manager.redo();
|
||||
manager.redo();
|
||||
expect(x).toEqual(6);
|
||||
manager.add({ ...makeDoUndo(5), mustExec: true });
|
||||
expect(x).toEqual(11);
|
||||
});
|
||||
|
||||
it("should check signature compression/decompression", async () => {
|
||||
let gen = n => new Float32Array(crypto.getRandomValues(new Uint16Array(n)));
|
||||
let outlines = [102, 28, 254, 4536, 10, 14532, 512].map(gen);
|
||||
const signature = {
|
||||
outlines,
|
||||
areContours: false,
|
||||
thickness: 1,
|
||||
width: 123,
|
||||
height: 456,
|
||||
};
|
||||
let compressed = await SignatureExtractor.compressSignature(signature);
|
||||
let decompressed = await SignatureExtractor.decompressSignature(compressed);
|
||||
expect(decompressed).toEqual(signature);
|
||||
|
||||
signature.thickness = 2;
|
||||
compressed = await SignatureExtractor.compressSignature(signature);
|
||||
decompressed = await SignatureExtractor.decompressSignature(compressed);
|
||||
expect(decompressed).toEqual(signature);
|
||||
|
||||
signature.areContours = true;
|
||||
compressed = await SignatureExtractor.compressSignature(signature);
|
||||
decompressed = await SignatureExtractor.decompressSignature(compressed);
|
||||
expect(decompressed).toEqual(signature);
|
||||
|
||||
// Numbers are small enough to be compressed with Uint8Array.
|
||||
gen = n =>
|
||||
new Float32Array(
|
||||
crypto.getRandomValues(new Uint8Array(n)).map(x => x / 10)
|
||||
);
|
||||
outlines = [100, 200, 300, 10, 80].map(gen);
|
||||
signature.outlines = outlines;
|
||||
compressed = await SignatureExtractor.compressSignature(signature);
|
||||
decompressed = await SignatureExtractor.decompressSignature(compressed);
|
||||
expect(decompressed).toEqual(signature);
|
||||
|
||||
// Numbers are large enough to be compressed with Uint16Array.
|
||||
gen = n =>
|
||||
new Float32Array(
|
||||
crypto.getRandomValues(new Uint16Array(n)).map(x => x / 10)
|
||||
);
|
||||
outlines = [100, 200, 300, 10, 80].map(gen);
|
||||
signature.outlines = outlines;
|
||||
compressed = await SignatureExtractor.compressSignature(signature);
|
||||
decompressed = await SignatureExtractor.decompressSignature(compressed);
|
||||
expect(decompressed).toEqual(signature);
|
||||
});
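// Illustrative note (not the actual codec in signaturedraw.js): the two
// generators above exercise coordinate ranges that fit into Uint8Array and
// Uint16Array respectively, matching the comments in this test. A hypothetical
// width selection for the integer part could look like the helper below; the
// real storage format is whatever compressSignature/decompressSignature agree
// on, including how fractional parts such as x / 10 are preserved.
const pickIntegerWidth = maxValue =>
  maxValue <= 0xff ? Uint8Array : maxValue <= 0xffff ? Uint16Array : Float32Array;
// pickIntegerWidth(200) === Uint8Array, pickIntegerWidth(40000) === Uint16Array.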
|
||||
});
|
||||
46
test/unit/encodings_spec.js
Normal file
@@ -0,0 +1,46 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { getEncoding } from "../../src/core/encodings.js";
|
||||
|
||||
describe("encodings", function () {
|
||||
describe("getEncoding", function () {
|
||||
it("fetches a valid array for known encoding names", function () {
|
||||
const knownEncodingNames = [
|
||||
"ExpertEncoding",
|
||||
"MacExpertEncoding",
|
||||
"MacRomanEncoding",
|
||||
"StandardEncoding",
|
||||
"SymbolSetEncoding",
|
||||
"WinAnsiEncoding",
|
||||
"ZapfDingbatsEncoding",
|
||||
];
|
||||
|
||||
for (const knownEncodingName of knownEncodingNames) {
|
||||
const encoding = getEncoding(knownEncodingName);
|
||||
expect(Array.isArray(encoding)).toEqual(true);
|
||||
expect(encoding.length).toEqual(256);
|
||||
|
||||
for (const item of encoding) {
|
||||
expect(typeof item).toEqual("string");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
it("fetches `null` for unknown encoding names", function () {
|
||||
expect(getEncoding("FooBarEncoding")).toEqual(null);
|
||||
});
|
||||
});
|
||||
});
|
||||
464
test/unit/evaluator_spec.js
Normal file
@@ -0,0 +1,464 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { createIdFactory, XRefMock } from "./test_utils.js";
|
||||
import { Dict, Name } from "../../src/core/primitives.js";
|
||||
import { FormatError, OPS } from "../../src/shared/util.js";
|
||||
import { Stream, StringStream } from "../../src/core/stream.js";
|
||||
import { OperatorList } from "../../src/core/operator_list.js";
|
||||
import { PartialEvaluator } from "../../src/core/evaluator.js";
|
||||
import { WorkerTask } from "../../src/core/worker.js";
|
||||
|
||||
describe("evaluator", function () {
|
||||
function HandlerMock() {
|
||||
this.inputs = [];
|
||||
}
|
||||
HandlerMock.prototype = {
|
||||
send(name, data) {
|
||||
this.inputs.push({ name, data });
|
||||
},
|
||||
};
|
||||
function ResourcesMock() {}
|
||||
ResourcesMock.prototype = {
|
||||
get(name) {
|
||||
return this[name];
|
||||
},
|
||||
};
|
||||
|
||||
async function runOperatorListCheck(evaluator, stream, resources) {
|
||||
const operatorList = new OperatorList();
|
||||
const task = new WorkerTask("OperatorListCheck");
|
||||
await evaluator.getOperatorList({
|
||||
stream,
|
||||
task,
|
||||
resources,
|
||||
operatorList,
|
||||
});
|
||||
return operatorList;
|
||||
}
|
||||
|
||||
let partialEvaluator;
|
||||
|
||||
beforeAll(function () {
|
||||
partialEvaluator = new PartialEvaluator({
|
||||
xref: new XRefMock(),
|
||||
handler: new HandlerMock(),
|
||||
pageIndex: 0,
|
||||
idFactory: createIdFactory(/* pageIndex = */ 0),
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(function () {
|
||||
partialEvaluator = null;
|
||||
});
|
||||
|
||||
describe("splitCombinedOperations", function () {
|
||||
it("should reject unknown operations", async function () {
|
||||
const stream = new StringStream("fTT");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
expect(!!result.fnArray && !!result.argsArray).toEqual(true);
|
||||
expect(result.fnArray.length).toEqual(1);
|
||||
expect(result.fnArray[0]).toEqual(OPS.constructPath);
|
||||
expect(result.argsArray[0]).toEqual([OPS.fill, [null], null]);
|
||||
});
|
||||
|
||||
it("should handle one operation", async function () {
|
||||
const stream = new StringStream("Q");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
expect(!!result.fnArray && !!result.argsArray).toEqual(true);
|
||||
expect(result.fnArray.length).toEqual(1);
|
||||
expect(result.fnArray[0]).toEqual(OPS.restore);
|
||||
});
|
||||
|
||||
it("should handle two glued operations", async function () {
|
||||
const imgDict = new Dict();
|
||||
imgDict.set("Subtype", Name.get("Image"));
|
||||
imgDict.set("Width", 1);
|
||||
imgDict.set("Height", 1);
|
||||
|
||||
const imgStream = new Stream([0]);
|
||||
imgStream.dict = imgDict;
|
||||
|
||||
const xObject = new Dict();
|
||||
xObject.set("Res1", imgStream);
|
||||
|
||||
const resources = new ResourcesMock();
|
||||
resources.XObject = xObject;
|
||||
|
||||
const stream = new StringStream("/Res1 DoQ");
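// "DoQ" contains no whitespace between the two operators; the evaluator is
// still expected to split it into Do (paint the XObject) followed by Q
// (restore), as the fnArray expectations below verify.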
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
resources
|
||||
);
|
||||
expect(result.fnArray.length).toEqual(3);
|
||||
expect(result.fnArray[0]).toEqual(OPS.dependency);
|
||||
expect(result.fnArray[1]).toEqual(OPS.paintImageXObject);
|
||||
expect(result.fnArray[2]).toEqual(OPS.restore);
|
||||
expect(result.argsArray.length).toEqual(3);
|
||||
expect(result.argsArray[0]).toEqual(["img_p0_1"]);
|
||||
expect(result.argsArray[1]).toEqual(["img_p0_1", 1, 1]);
|
||||
expect(result.argsArray[2]).toEqual(null);
|
||||
});
|
||||
|
||||
it("should handle three glued operations", async function () {
|
||||
const stream = new StringStream("fff");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
expect(!!result.fnArray && !!result.argsArray).toEqual(true);
|
||||
expect(result.fnArray.length).toEqual(3);
|
||||
expect(result.fnArray).toEqual([
|
||||
OPS.constructPath,
|
||||
OPS.constructPath,
|
||||
OPS.constructPath,
|
||||
]);
|
||||
expect(result.argsArray[0][0]).toEqual(OPS.fill);
|
||||
expect(result.argsArray[1][0]).toEqual(OPS.fill);
|
||||
expect(result.argsArray[2][0]).toEqual(OPS.fill);
|
||||
});
|
||||
|
||||
it("should handle three glued operations #2", async function () {
|
||||
const resources = new ResourcesMock();
|
||||
resources.Res1 = {};
|
||||
const stream = new StringStream("B*Bf*");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
resources
|
||||
);
|
||||
expect(!!result.fnArray && !!result.argsArray).toEqual(true);
|
||||
expect(result.fnArray).toEqual([
|
||||
OPS.constructPath,
|
||||
OPS.constructPath,
|
||||
OPS.constructPath,
|
||||
]);
|
||||
expect(result.argsArray[0][0]).toEqual(OPS.eoFillStroke);
|
||||
expect(result.argsArray[1][0]).toEqual(OPS.fillStroke);
|
||||
expect(result.argsArray[2][0]).toEqual(OPS.eoFill);
|
||||
});
|
||||
|
||||
it("should handle glued operations and operands", async function () {
|
||||
const stream = new StringStream("f5 Ts");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
expect(!!result.fnArray && !!result.argsArray).toEqual(true);
|
||||
expect(result.fnArray.length).toEqual(2);
|
||||
expect(result.fnArray[0]).toEqual(OPS.constructPath);
|
||||
expect(result.fnArray[1]).toEqual(OPS.setTextRise);
|
||||
expect(result.argsArray.length).toEqual(2);
|
||||
expect(result.argsArray[1].length).toEqual(1);
|
||||
expect(result.argsArray[1][0]).toEqual(5);
|
||||
});
|
||||
|
||||
it("should handle glued operations and literals", async function () {
|
||||
const stream = new StringStream("trueifalserinulln");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
expect(!!result.fnArray && !!result.argsArray).toEqual(true);
|
||||
expect(result.fnArray.length).toEqual(3);
|
||||
expect(result.fnArray[0]).toEqual(OPS.setFlatness);
|
||||
expect(result.fnArray[1]).toEqual(OPS.setRenderingIntent);
|
||||
expect(result.fnArray[2]).toEqual(OPS.constructPath);
|
||||
expect(result.argsArray.length).toEqual(3);
|
||||
expect(result.argsArray[0].length).toEqual(1);
|
||||
expect(result.argsArray[0][0]).toEqual(true);
|
||||
expect(result.argsArray[1].length).toEqual(1);
|
||||
expect(result.argsArray[1][0]).toEqual(false);
|
||||
expect(result.argsArray[2]).toEqual([OPS.endPath, [null], null]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("validateNumberOfArgs", function () {
|
||||
it("should execute if correct number of arguments", async function () {
|
||||
const stream = new StringStream("5 1 d0");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
expect(result.argsArray[0][0]).toEqual(5);
|
||||
expect(result.argsArray[0][1]).toEqual(1);
|
||||
expect(result.fnArray[0]).toEqual(OPS.setCharWidth);
|
||||
});
|
||||
|
||||
it("should execute if too many arguments", async function () {
|
||||
const stream = new StringStream("5 1 4 d0");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
expect(result.argsArray[0][0]).toEqual(1);
|
||||
expect(result.argsArray[0][1]).toEqual(4);
|
||||
expect(result.fnArray[0]).toEqual(OPS.setCharWidth);
|
||||
});
|
||||
|
||||
it("should execute if nested commands", async function () {
|
||||
const gState = new Dict();
|
||||
gState.set("LW", 2);
|
||||
gState.set("CA", 0.5);
|
||||
|
||||
const extGState = new Dict();
|
||||
extGState.set("GS2", gState);
|
||||
|
||||
const resources = new ResourcesMock();
|
||||
resources.ExtGState = extGState;
|
||||
|
||||
const stream = new StringStream("/F2 /GS2 gs 5.711 Tf");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
resources
|
||||
);
|
||||
expect(result.fnArray.length).toEqual(3);
|
||||
expect(result.fnArray[0]).toEqual(OPS.setGState);
|
||||
expect(result.fnArray[1]).toEqual(OPS.dependency);
|
||||
expect(result.fnArray[2]).toEqual(OPS.setFont);
|
||||
expect(result.argsArray.length).toEqual(3);
|
||||
expect(result.argsArray[0]).toEqual([
|
||||
[
|
||||
["LW", 2],
|
||||
["CA", 0.5],
|
||||
],
|
||||
]);
|
||||
expect(result.argsArray[1]).toEqual(["g_font_error"]);
|
||||
expect(result.argsArray[2]).toEqual(["g_font_error", 5.711]);
|
||||
});
|
||||
|
||||
it("should skip if too few arguments", async function () {
|
||||
const stream = new StringStream("5 d0");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
expect(result.argsArray).toEqual([]);
|
||||
expect(result.fnArray).toEqual([]);
|
||||
});
|
||||
|
||||
it(
|
||||
"should error if (many) path operators have too few arguments " +
|
||||
"(bug 1443140)",
|
||||
async function () {
|
||||
const NUM_INVALID_OPS = 25;
|
||||
|
||||
// Non-path operators, should be ignored.
|
||||
const invalidMoveText = "10 Td\n".repeat(NUM_INVALID_OPS);
|
||||
const moveTextStream = new StringStream(invalidMoveText);
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
moveTextStream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
expect(result.argsArray).toEqual([]);
|
||||
expect(result.fnArray).toEqual([]);
|
||||
|
||||
// Path operators, should throw error.
|
||||
const invalidLineTo = "20 l\n".repeat(NUM_INVALID_OPS);
|
||||
const lineToStream = new StringStream(invalidLineTo);
|
||||
|
||||
try {
|
||||
await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
lineToStream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
} catch (reason) {
|
||||
expect(reason instanceof FormatError).toEqual(true);
|
||||
expect(reason.message).toEqual(
|
||||
"Invalid command l: expected 2 args, but received 1 args."
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
it("should close opened saves", async function () {
|
||||
const stream = new StringStream("qq");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
expect(!!result.fnArray && !!result.argsArray).toEqual(true);
|
||||
expect(result.fnArray.length).toEqual(4);
|
||||
expect(result.fnArray[0]).toEqual(OPS.save);
|
||||
expect(result.fnArray[1]).toEqual(OPS.save);
|
||||
expect(result.fnArray[2]).toEqual(OPS.restore);
|
||||
expect(result.fnArray[3]).toEqual(OPS.restore);
|
||||
});
|
||||
|
||||
it("should error on paintXObject if name is missing", async function () {
|
||||
const stream = new StringStream("/ Do");
|
||||
|
||||
try {
|
||||
await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
} catch (reason) {
|
||||
expect(reason instanceof FormatError).toEqual(true);
|
||||
expect(reason.message).toEqual("XObject should be a stream");
|
||||
}
|
||||
});
|
||||
|
||||
it("should skip paintXObject if subtype is PS", async function () {
|
||||
const xobjStreamDict = new Dict();
|
||||
xobjStreamDict.set("Subtype", Name.get("PS"));
|
||||
const xobjStream = new Stream([], 0, 0, xobjStreamDict);
|
||||
|
||||
const xobjs = new Dict();
|
||||
xobjs.set("Res1", xobjStream);
|
||||
|
||||
const resources = new Dict();
|
||||
resources.set("XObject", xobjs);
|
||||
|
||||
const stream = new StringStream("/Res1 Do");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
resources
|
||||
);
|
||||
expect(result.argsArray).toEqual([]);
|
||||
expect(result.fnArray).toEqual([]);
|
||||
});
|
||||
|
||||
it("should handle invalid dash stuff", async function () {
|
||||
const stream = new StringStream("[ none ] 0 d");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
new ResourcesMock()
|
||||
);
|
||||
expect(result.argsArray[0][0]).toEqual([]);
|
||||
expect(result.argsArray[0][1]).toEqual(0);
|
||||
expect(result.fnArray[0]).toEqual(OPS.setDash);
|
||||
});
|
||||
});
|
||||
|
||||
describe("thread control", function () {
|
||||
it("should abort operator list parsing", async function () {
|
||||
const stream = new StringStream("qqQQ");
|
||||
const resources = new ResourcesMock();
|
||||
const result = new OperatorList();
|
||||
const task = new WorkerTask("OperatorListAbort");
|
||||
task.terminate();
|
||||
|
||||
try {
|
||||
await partialEvaluator.getOperatorList({
|
||||
stream,
|
||||
task,
|
||||
resources,
|
||||
operatorList: result,
|
||||
});
|
||||
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
} catch {
|
||||
expect(!!result.fnArray && !!result.argsArray).toEqual(true);
|
||||
expect(result.fnArray.length).toEqual(0);
|
||||
}
|
||||
});
|
||||
|
||||
it("should abort text content parsing", async function () {
|
||||
const resources = new ResourcesMock();
|
||||
const stream = new StringStream("qqQQ");
|
||||
const task = new WorkerTask("TextContentAbort");
|
||||
task.terminate();
|
||||
|
||||
try {
|
||||
await partialEvaluator.getTextContent({
|
||||
stream,
|
||||
task,
|
||||
resources,
|
||||
});
|
||||
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
} catch {
|
||||
expect(true).toEqual(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("operator list", function () {
|
||||
class StreamSinkMock {
|
||||
enqueue() {}
|
||||
}
|
||||
|
||||
it("should get correct total length after flushing", function () {
|
||||
const operatorList = new OperatorList(null, new StreamSinkMock());
|
||||
operatorList.addOp(OPS.save, null);
|
||||
operatorList.addOp(OPS.restore, null);
|
||||
|
||||
expect(operatorList.totalLength).toEqual(2);
|
||||
expect(operatorList.length).toEqual(2);
|
||||
|
||||
operatorList.flush();
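// After flushing, the buffered operations are handed off (here to the mock
// stream sink), so `length` drops back to zero while `totalLength` keeps
// counting every operation ever added.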
|
||||
|
||||
expect(operatorList.totalLength).toEqual(2);
|
||||
expect(operatorList.length).toEqual(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("graphics-state operators", function () {
|
||||
it("should convert negative line width to absolute value in the graphic state", async function () {
|
||||
const gState = new Dict();
|
||||
gState.set("LW", -5);
|
||||
const extGState = new Dict();
|
||||
extGState.set("GSneg", gState);
|
||||
|
||||
const resources = new ResourcesMock();
|
||||
resources.ExtGState = extGState;
|
||||
|
||||
const stream = new StringStream("/GSneg gs");
|
||||
const result = await runOperatorListCheck(
|
||||
partialEvaluator,
|
||||
stream,
|
||||
resources
|
||||
);
|
||||
|
||||
expect(result.fnArray).toEqual([OPS.setGState]);
|
||||
|
||||
const stateEntries = result.argsArray[0][0];
|
||||
const lwEntry = stateEntries.find(([key]) => key === "LW");
|
||||
expect(lwEntry).toBeDefined();
|
||||
expect(lwEntry[1]).toEqual(5);
|
||||
});
|
||||
});
|
||||
});
|
||||
340
test/unit/event_utils_spec.js
Normal file
@@ -0,0 +1,340 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
EventBus,
|
||||
waitOnEventOrTimeout,
|
||||
WaitOnType,
|
||||
} from "../../web/event_utils.js";
|
||||
import { isNodeJS } from "../../src/shared/util.js";
|
||||
|
||||
describe("event_utils", function () {
|
||||
describe("EventBus", function () {
|
||||
it("dispatch event", function () {
|
||||
const eventBus = new EventBus();
|
||||
let count = 0;
|
||||
eventBus.on("test", function (evt) {
|
||||
expect(evt).toEqual(undefined);
|
||||
count++;
|
||||
});
|
||||
eventBus.dispatch("test");
|
||||
expect(count).toEqual(1);
|
||||
});
|
||||
it("dispatch event with arguments", function () {
|
||||
const eventBus = new EventBus();
|
||||
let count = 0;
|
||||
eventBus.on("test", function (evt) {
|
||||
expect(evt).toEqual({ abc: 123 });
|
||||
count++;
|
||||
});
|
||||
eventBus.dispatch("test", {
|
||||
abc: 123,
|
||||
});
|
||||
expect(count).toEqual(1);
|
||||
});
|
||||
it("dispatch different event", function () {
|
||||
const eventBus = new EventBus();
|
||||
let count = 0;
|
||||
eventBus.on("test", function () {
|
||||
count++;
|
||||
});
|
||||
eventBus.dispatch("nottest");
|
||||
expect(count).toEqual(0);
|
||||
});
|
||||
it("dispatch event multiple times", function () {
|
||||
const eventBus = new EventBus();
|
||||
let count = 0;
|
||||
eventBus.dispatch("test");
|
||||
eventBus.on("test", function () {
|
||||
count++;
|
||||
});
|
||||
eventBus.dispatch("test");
|
||||
eventBus.dispatch("test");
|
||||
expect(count).toEqual(2);
|
||||
});
|
||||
it("dispatch event to multiple handlers", function () {
|
||||
const eventBus = new EventBus();
|
||||
let count = 0;
|
||||
eventBus.on("test", function () {
|
||||
count++;
|
||||
});
|
||||
eventBus.on("test", function () {
|
||||
count++;
|
||||
});
|
||||
eventBus.dispatch("test");
|
||||
expect(count).toEqual(2);
|
||||
});
|
||||
it("dispatch to detached", function () {
|
||||
const eventBus = new EventBus();
|
||||
let count = 0;
|
||||
const listener = function () {
|
||||
count++;
|
||||
};
|
||||
eventBus.on("test", listener);
|
||||
eventBus.dispatch("test");
|
||||
eventBus.off("test", listener);
|
||||
eventBus.dispatch("test");
|
||||
expect(count).toEqual(1);
|
||||
});
|
||||
it("dispatch to wrong detached", function () {
|
||||
const eventBus = new EventBus();
|
||||
let count = 0;
|
||||
eventBus.on("test", function () {
|
||||
count++;
|
||||
});
|
||||
eventBus.dispatch("test");
|
||||
eventBus.off("test", function () {
|
||||
count++;
|
||||
});
|
||||
eventBus.dispatch("test");
|
||||
expect(count).toEqual(2);
|
||||
});
|
||||
it("dispatch to detached during handling", function () {
|
||||
const eventBus = new EventBus();
|
||||
let count = 0;
|
||||
const listener1 = function () {
|
||||
eventBus.off("test", listener2);
|
||||
count++;
|
||||
};
|
||||
const listener2 = function () {
|
||||
eventBus.off("test", listener1);
|
||||
count++;
|
||||
};
|
||||
eventBus.on("test", listener1);
|
||||
eventBus.on("test", listener2);
|
||||
eventBus.dispatch("test");
|
||||
eventBus.dispatch("test");
|
||||
expect(count).toEqual(2);
|
||||
});
|
||||
|
||||
it("dispatch event to handlers with/without 'once' option", function () {
|
||||
const eventBus = new EventBus();
|
||||
let multipleCount = 0,
|
||||
onceCount = 0;
|
||||
|
||||
eventBus.on("test", function () {
|
||||
multipleCount++;
|
||||
});
|
||||
eventBus.on(
|
||||
"test",
|
||||
function () {
|
||||
onceCount++;
|
||||
},
|
||||
{ once: true }
|
||||
);
|
||||
|
||||
eventBus.dispatch("test");
|
||||
eventBus.dispatch("test");
|
||||
eventBus.dispatch("test");
|
||||
|
||||
expect(multipleCount).toEqual(3);
|
||||
expect(onceCount).toEqual(1);
|
||||
});
|
||||
|
||||
it("dispatch event to handlers with/without 'signal' option, aborted *before* dispatch", function () {
|
||||
const eventBus = new EventBus();
|
||||
const ac = new AbortController();
|
||||
let multipleCount = 0,
|
||||
noneCount = 0;
|
||||
|
||||
eventBus.on("test", function () {
|
||||
multipleCount++;
|
||||
});
|
||||
eventBus.on(
|
||||
"test",
|
||||
function () {
|
||||
noneCount++;
|
||||
},
|
||||
{ signal: ac.signal }
|
||||
);
|
||||
|
||||
ac.abort();
|
||||
|
||||
eventBus.dispatch("test");
|
||||
eventBus.dispatch("test");
|
||||
eventBus.dispatch("test");
|
||||
|
||||
expect(multipleCount).toEqual(3);
|
||||
expect(noneCount).toEqual(0);
|
||||
});
|
||||
|
||||
it("dispatch event to handlers with/without 'signal' option, aborted *after* dispatch", function () {
|
||||
const eventBus = new EventBus();
|
||||
const ac = new AbortController();
|
||||
let multipleCount = 0,
|
||||
onceCount = 0;
|
||||
|
||||
eventBus.on("test", function () {
|
||||
multipleCount++;
|
||||
});
|
||||
eventBus.on(
|
||||
"test",
|
||||
function () {
|
||||
onceCount++;
|
||||
},
|
||||
{ signal: ac.signal }
|
||||
);
|
||||
|
||||
eventBus.dispatch("test");
|
||||
ac.abort();
|
||||
|
||||
eventBus.dispatch("test");
|
||||
eventBus.dispatch("test");
|
||||
|
||||
expect(multipleCount).toEqual(3);
|
||||
expect(onceCount).toEqual(1);
|
||||
});
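// Note: the { signal } option mirrors the same option on DOM
// addEventListener(): aborting the AbortController detaches the listener, so
// only dispatches that happen before abort() are counted.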
|
||||
|
||||
it("should not re-dispatch to DOM", async function () {
|
||||
if (isNodeJS) {
|
||||
pending("Document is not supported in Node.js.");
|
||||
}
|
||||
const eventBus = new EventBus();
|
||||
let count = 0;
|
||||
eventBus.on("test", function (evt) {
|
||||
expect(evt).toEqual(undefined);
|
||||
count++;
|
||||
});
|
||||
function domEventListener() {
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
}
|
||||
document.addEventListener("test", domEventListener);
|
||||
|
||||
eventBus.dispatch("test");
|
||||
|
||||
await Promise.resolve();
|
||||
expect(count).toEqual(1);
|
||||
|
||||
document.removeEventListener("test", domEventListener);
|
||||
});
|
||||
});
|
||||
|
||||
describe("waitOnEventOrTimeout", function () {
|
||||
let eventBus;
|
||||
|
||||
beforeAll(function () {
|
||||
eventBus = new EventBus();
|
||||
});
|
||||
|
||||
afterAll(function () {
|
||||
eventBus = null;
|
||||
});
|
||||
|
||||
it("should reject invalid parameters", async function () {
|
||||
const invalidTarget = waitOnEventOrTimeout({
|
||||
target: "window",
|
||||
name: "DOMContentLoaded",
|
||||
}).then(
|
||||
function () {
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
},
|
||||
function (reason) {
|
||||
expect(reason instanceof Error).toEqual(true);
|
||||
}
|
||||
);
|
||||
|
||||
const invalidName = waitOnEventOrTimeout({
|
||||
target: eventBus,
|
||||
name: "",
|
||||
}).then(
|
||||
function () {
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
},
|
||||
function (reason) {
|
||||
expect(reason instanceof Error).toEqual(true);
|
||||
}
|
||||
);
|
||||
|
||||
const invalidDelay = waitOnEventOrTimeout({
|
||||
target: eventBus,
|
||||
name: "pagerendered",
|
||||
delay: -1000,
|
||||
}).then(
|
||||
function () {
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
},
|
||||
function (reason) {
|
||||
expect(reason instanceof Error).toEqual(true);
|
||||
}
|
||||
);
|
||||
|
||||
await Promise.all([invalidTarget, invalidName, invalidDelay]);
|
||||
});
|
||||
|
||||
it("should resolve on event, using the DOM", async function () {
|
||||
if (isNodeJS) {
|
||||
pending("Document is not supported in Node.js.");
|
||||
}
|
||||
const button = document.createElement("button");
|
||||
|
||||
const buttonClicked = waitOnEventOrTimeout({
|
||||
target: button,
|
||||
name: "click",
|
||||
delay: 10000,
|
||||
});
|
||||
// Immediately dispatch the expected event.
|
||||
button.click();
|
||||
|
||||
const type = await buttonClicked;
|
||||
expect(type).toEqual(WaitOnType.EVENT);
|
||||
});
|
||||
|
||||
it("should resolve on timeout, using the DOM", async function () {
|
||||
if (isNodeJS) {
|
||||
pending("Document is not supported in Node.js.");
|
||||
}
|
||||
const button = document.createElement("button");
|
||||
|
||||
const buttonClicked = waitOnEventOrTimeout({
|
||||
target: button,
|
||||
name: "click",
|
||||
delay: 10,
|
||||
});
|
||||
// Do *not* dispatch the event, and wait for the timeout.
|
||||
|
||||
const type = await buttonClicked;
|
||||
expect(type).toEqual(WaitOnType.TIMEOUT);
|
||||
});
|
||||
|
||||
it("should resolve on event, using the EventBus", async function () {
|
||||
const pageRendered = waitOnEventOrTimeout({
|
||||
target: eventBus,
|
||||
name: "pagerendered",
|
||||
delay: 10000,
|
||||
});
|
||||
// Immediately dispatch the expected event.
|
||||
eventBus.dispatch("pagerendered");
|
||||
|
||||
const type = await pageRendered;
|
||||
expect(type).toEqual(WaitOnType.EVENT);
|
||||
});
|
||||
|
||||
it("should resolve on timeout, using the EventBus", async function () {
|
||||
const pageRendered = waitOnEventOrTimeout({
|
||||
target: eventBus,
|
||||
name: "pagerendered",
|
||||
delay: 10,
|
||||
});
|
||||
// Do *not* dispatch the event, and wait for the timeout.
|
||||
|
||||
const type = await pageRendered;
|
||||
expect(type).toEqual(WaitOnType.TIMEOUT);
|
||||
});
|
||||
});
|
||||
});
|
||||
149
test/unit/fetch_stream_spec.js
Normal file
@@ -0,0 +1,149 @@
|
||||
/* Copyright 2019 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { AbortException } from "../../src/shared/util.js";
|
||||
import { PDFFetchStream } from "../../src/display/fetch_stream.js";
|
||||
import { testCrossOriginRedirects } from "./common_pdfstream_tests.js";
|
||||
import { TestPdfsServer } from "./test_utils.js";
|
||||
|
||||
describe("fetch_stream", function () {
|
||||
function getPdfUrl() {
|
||||
return TestPdfsServer.resolveURL("tracemonkey.pdf").href;
|
||||
}
|
||||
const pdfLength = 1016315;
|
||||
|
||||
beforeAll(async function () {
|
||||
await TestPdfsServer.ensureStarted();
|
||||
});
|
||||
|
||||
afterAll(async function () {
|
||||
await TestPdfsServer.ensureStopped();
|
||||
});
|
||||
|
||||
it("read with streaming", async function () {
|
||||
const stream = new PDFFetchStream({
|
||||
url: getPdfUrl(),
|
||||
disableStream: false,
|
||||
disableRange: true,
|
||||
});
|
||||
|
||||
const fullReader = stream.getFullReader();
|
||||
|
||||
let isStreamingSupported, isRangeSupported;
|
||||
await fullReader.headersReady.then(function () {
|
||||
isStreamingSupported = fullReader.isStreamingSupported;
|
||||
isRangeSupported = fullReader.isRangeSupported;
|
||||
});
|
||||
|
||||
let len = 0;
|
||||
const read = function () {
|
||||
return fullReader.read().then(function (result) {
|
||||
if (result.done) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
len += result.value.byteLength;
|
||||
return read();
|
||||
});
|
||||
};
|
||||
|
||||
await read();
|
||||
|
||||
expect(len).toEqual(pdfLength);
|
||||
expect(isStreamingSupported).toEqual(true);
|
||||
expect(isRangeSupported).toEqual(false);
|
||||
});
|
||||
|
||||
it("read ranges with streaming", async function () {
|
||||
const rangeSize = 32768;
|
||||
const stream = new PDFFetchStream({
|
||||
url: getPdfUrl(),
|
||||
rangeChunkSize: rangeSize,
|
||||
disableStream: false,
|
||||
disableRange: false,
|
||||
});
|
||||
|
||||
const fullReader = stream.getFullReader();
|
||||
|
||||
let isStreamingSupported, isRangeSupported, fullReaderCancelled;
|
||||
await fullReader.headersReady.then(function () {
|
||||
isStreamingSupported = fullReader.isStreamingSupported;
|
||||
isRangeSupported = fullReader.isRangeSupported;
|
||||
// We should be able to close the full reader without any issues.
|
||||
fullReader.cancel(new AbortException("Don't need fullReader."));
|
||||
fullReaderCancelled = true;
|
||||
});
|
||||
|
||||
const tailSize = pdfLength % rangeSize || rangeSize;
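// For example, with pdfLength = 1016315 and rangeSize = 32768 the remainder is
// 1016315 % 32768 = 507, so rangeReader2 below covers the 507-byte tail while
// rangeReader1 covers the full 32768-byte chunk just before it (the
// `|| rangeSize` fallback only matters when the length is an exact multiple).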
|
||||
const rangeReader1 = stream.getRangeReader(
|
||||
pdfLength - tailSize - rangeSize,
|
||||
pdfLength - tailSize
|
||||
);
|
||||
const rangeReader2 = stream.getRangeReader(pdfLength - tailSize, pdfLength);
|
||||
|
||||
const result1 = { value: 0 },
|
||||
result2 = { value: 0 };
|
||||
const read = function (reader, lenResult) {
|
||||
return reader.read().then(function (result) {
|
||||
if (result.done) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
lenResult.value += result.value.byteLength;
|
||||
return read(reader, lenResult);
|
||||
});
|
||||
};
|
||||
|
||||
await Promise.all([
|
||||
read(rangeReader1, result1),
|
||||
read(rangeReader2, result2),
|
||||
]);
|
||||
|
||||
expect(isStreamingSupported).toEqual(true);
|
||||
expect(isRangeSupported).toEqual(true);
|
||||
expect(fullReaderCancelled).toEqual(true);
|
||||
expect(result1.value).toEqual(rangeSize);
|
||||
expect(result2.value).toEqual(tailSize);
|
||||
});
|
||||
|
||||
describe("Redirects", function () {
|
||||
it("redirects allowed if all responses are same-origin", async function () {
|
||||
await testCrossOriginRedirects({
|
||||
PDFStreamClass: PDFFetchStream,
|
||||
redirectIfRange: false,
|
||||
async testRangeReader(rangeReader) {
|
||||
await expectAsync(rangeReader.read()).toBeResolved();
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("redirects blocked if any response is cross-origin", async function () {
|
||||
await testCrossOriginRedirects({
|
||||
PDFStreamClass: PDFFetchStream,
|
||||
redirectIfRange: true,
|
||||
async testRangeReader(rangeReader) {
|
||||
// When read (sync), error should be reported.
|
||||
await expectAsync(rangeReader.read()).toBeRejectedWithError(
|
||||
/^Expected range response-origin "http:.*" to match "http:.*"\.$/
|
||||
);
|
||||
// When read again (async), error should be consistent.
|
||||
await expectAsync(rangeReader.read()).toBeRejectedWithError(
|
||||
/^Expected range response-origin "http:.*" to match "http:.*"\.$/
|
||||
);
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
419
test/unit/font_substitutions_spec.js
Normal file
@@ -0,0 +1,419 @@
|
||||
/* Copyright 2022 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { createIdFactory } from "./test_utils.js";
|
||||
import { getFontSubstitution } from "../../src/core/font_substitutions.js";
|
||||
|
||||
describe("getFontSubstitution", function () {
|
||||
const idFactory = createIdFactory(0);
|
||||
const localFontPath = "/tmp/";
|
||||
|
||||
it("should substitute an unknown font", () => {
|
||||
const fontName = "Foo";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
undefined,
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: true,
|
||||
baseFontName: "Foo",
|
||||
src: "local(Foo)",
|
||||
style: {
|
||||
style: "normal",
|
||||
weight: "normal",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(/^"Foo",g_d(\d+)_sf(\d+)$/);
|
||||
});
|
||||
|
||||
it("should substitute an unknown font subset", () => {
|
||||
const fontName = "ABCDEF+Foo";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
undefined,
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: true,
|
||||
baseFontName: "Foo",
|
||||
src: "local(Foo)",
|
||||
style: {
|
||||
style: "normal",
|
||||
weight: "normal",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(/^"Foo",g_d(\d+)_sf(\d+)$/);
|
||||
});
|
||||
|
||||
it("should substitute an unknown bold font", () => {
|
||||
const fontName = "Foo-Bold";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
undefined,
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: true,
|
||||
baseFontName: "Foo-Bold",
|
||||
src: "local(Foo-Bold)",
|
||||
style: {
|
||||
style: "normal",
|
||||
weight: "bold",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(/^"Foo",g_d(\d+)_sf(\d+)$/);
|
||||
});
|
||||
|
||||
it("should substitute an unknown italic font", () => {
|
||||
const fontName = "Foo-Italic";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
undefined,
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: true,
|
||||
baseFontName: "Foo-Italic",
|
||||
src: "local(Foo-Italic)",
|
||||
style: {
|
||||
style: "italic",
|
||||
weight: "normal",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(/^"Foo",g_d(\d+)_sf(\d+)$/);
|
||||
});
|
||||
|
||||
it("should substitute an unknown bold italic font", () => {
|
||||
const fontName = "Foo-BoldItalic";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
undefined,
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: true,
|
||||
baseFontName: "Foo-BoldItalic",
|
||||
src: "local(Foo-BoldItalic)",
|
||||
style: {
|
||||
style: "italic",
|
||||
weight: "bold",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(/^"Foo",g_d(\d+)_sf(\d+)$/);
|
||||
});
|
||||
|
||||
it("should substitute an unknown font but with a standard font", () => {
|
||||
const fontName = "Foo";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
"Helvetica",
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: false,
|
||||
baseFontName: "Foo",
|
||||
src:
|
||||
"local(Foo),local(Helvetica),local(Helvetica Neue)," +
|
||||
"local(Arial),local(Arial Nova),local(Liberation Sans)," +
|
||||
"local(Arimo),local(Nimbus Sans),local(Nimbus Sans L)," +
|
||||
"local(A030),local(TeX Gyre Heros),local(FreeSans)," +
|
||||
"local(DejaVu Sans),local(Albany),local(Bitstream Vera Sans)," +
|
||||
"local(Arial Unicode MS),local(Microsoft Sans Serif)," +
|
||||
"local(Apple Symbols),local(Cantarell)," +
|
||||
"url(/tmp/LiberationSans-Regular.ttf)",
|
||||
style: {
|
||||
style: "normal",
|
||||
weight: "normal",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(/^"Foo",g_d(\d+)_sf(\d+),sans-serif$/);
|
||||
});
|
||||
|
||||
it("should substitute an unknown font but with a standard italic font", () => {
|
||||
const fontName = "Foo-Italic";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
"Helvetica-Oblique",
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: false,
|
||||
baseFontName: "Foo-Italic",
|
||||
src:
|
||||
"local(Foo-Italic),local(Helvetica Italic)," +
|
||||
"local(Helvetica Neue Italic),local(Arial Italic)," +
|
||||
"local(Arial Nova Italic),local(Liberation Sans Italic)," +
|
||||
"local(Arimo Italic),local(Nimbus Sans Italic)," +
|
||||
"local(Nimbus Sans L Italic),local(A030 Italic)," +
|
||||
"local(TeX Gyre Heros Italic),local(FreeSans Italic)," +
|
||||
"local(DejaVu Sans Italic),local(Albany Italic)," +
|
||||
"local(Bitstream Vera Sans Italic),local(Arial Unicode MS Italic)," +
|
||||
"local(Microsoft Sans Serif Italic),local(Apple Symbols Italic)," +
|
||||
"local(Cantarell Italic),url(/tmp/LiberationSans-Italic.ttf)",
|
||||
style: {
|
||||
style: "italic",
|
||||
weight: "normal",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(/^"Foo",g_d(\d+)_sf(\d+),sans-serif$/);
|
||||
});
|
||||
|
||||
it("should substitute an unknown font but with a standard bold font", () => {
|
||||
const fontName = "Foo-Bold";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
"Helvetica-Bold",
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: false,
|
||||
baseFontName: "Foo-Bold",
|
||||
src:
|
||||
"local(Foo-Bold),local(Helvetica Bold),local(Helvetica Neue Bold)," +
|
||||
"local(Arial Bold),local(Arial Nova Bold)," +
|
||||
"local(Liberation Sans Bold),local(Arimo Bold)," +
|
||||
"local(Nimbus Sans Bold),local(Nimbus Sans L Bold)," +
|
||||
"local(A030 Bold),local(TeX Gyre Heros Bold),local(FreeSans Bold)," +
|
||||
"local(DejaVu Sans Bold),local(Albany Bold)," +
|
||||
"local(Bitstream Vera Sans Bold),local(Arial Unicode MS Bold)," +
|
||||
"local(Microsoft Sans Serif Bold),local(Apple Symbols Bold)," +
|
||||
"local(Cantarell Bold),url(/tmp/LiberationSans-Bold.ttf)",
|
||||
style: {
|
||||
style: "normal",
|
||||
weight: "bold",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(/^"Foo",g_d(\d+)_sf(\d+),sans-serif$/);
|
||||
});
|
||||
|
||||
it("should substitute an unknown font but with a standard bold italic font", () => {
|
||||
const fontName = "Foo-BoldItalic";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
"Helvetica-BoldOblique",
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: false,
|
||||
baseFontName: "Foo-BoldItalic",
|
||||
src:
|
||||
"local(Foo-BoldItalic),local(Helvetica Bold Italic)," +
|
||||
"local(Helvetica Neue Bold Italic),local(Arial Bold Italic)," +
|
||||
"local(Arial Nova Bold Italic),local(Liberation Sans Bold Italic)," +
|
||||
"local(Arimo Bold Italic),local(Nimbus Sans Bold Italic)," +
|
||||
"local(Nimbus Sans L Bold Italic),local(A030 Bold Italic)," +
|
||||
"local(TeX Gyre Heros Bold Italic),local(FreeSans Bold Italic)," +
|
||||
"local(DejaVu Sans Bold Italic),local(Albany Bold Italic)," +
|
||||
"local(Bitstream Vera Sans Bold Italic)," +
|
||||
"local(Arial Unicode MS Bold Italic)," +
|
||||
"local(Microsoft Sans Serif Bold Italic)," +
|
||||
"local(Apple Symbols Bold Italic),local(Cantarell Bold Italic)," +
|
||||
"url(/tmp/LiberationSans-BoldItalic.ttf)",
|
||||
style: {
|
||||
style: "italic",
|
||||
weight: "bold",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(/^"Foo",g_d(\d+)_sf(\d+),sans-serif$/);
|
||||
});
|
||||
|
||||
it("should substitute Calibri", () => {
|
||||
const fontName = "Calibri";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
undefined,
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: false,
|
||||
baseFontName: "Calibri",
|
||||
src:
|
||||
"local(Calibri),local(Carlito),local(Helvetica)," +
|
||||
"local(Helvetica Neue),local(Arial),local(Arial Nova)," +
|
||||
"local(Liberation Sans),local(Arimo),local(Nimbus Sans)," +
|
||||
"local(Nimbus Sans L),local(A030),local(TeX Gyre Heros)," +
|
||||
"local(FreeSans),local(DejaVu Sans),local(Albany)," +
|
||||
"local(Bitstream Vera Sans),local(Arial Unicode MS)," +
|
||||
"local(Microsoft Sans Serif),local(Apple Symbols)," +
|
||||
"local(Cantarell),url(/tmp/LiberationSans-Regular.ttf)",
|
||||
style: {
|
||||
style: "normal",
|
||||
weight: "normal",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(
|
||||
/^"Calibri",g_d(\d+)_sf(\d+),sans-serif$/
|
||||
);
|
||||
});
|
||||
|
||||
it("should substitute Calibri-Bold", () => {
|
||||
const fontName = "Calibri-Bold";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
undefined,
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: false,
|
||||
baseFontName: "Calibri-Bold",
|
||||
src:
|
||||
"local(Calibri Bold),local(Carlito Bold),local(Helvetica Bold)," +
|
||||
"local(Helvetica Neue Bold),local(Arial Bold)," +
|
||||
"local(Arial Nova Bold),local(Liberation Sans Bold)," +
|
||||
"local(Arimo Bold),local(Nimbus Sans Bold)," +
|
||||
"local(Nimbus Sans L Bold),local(A030 Bold)," +
|
||||
"local(TeX Gyre Heros Bold),local(FreeSans Bold)," +
|
||||
"local(DejaVu Sans Bold),local(Albany Bold)," +
|
||||
"local(Bitstream Vera Sans Bold),local(Arial Unicode MS Bold)," +
|
||||
"local(Microsoft Sans Serif Bold),local(Apple Symbols Bold)," +
|
||||
"local(Cantarell Bold),url(/tmp/LiberationSans-Bold.ttf)",
|
||||
style: {
|
||||
style: "normal",
|
||||
weight: "bold",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(
|
||||
/^"Calibri",g_d(\d+)_sf(\d+),sans-serif$/
|
||||
);
|
||||
});
|
||||
|
||||
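// Arial Black maps onto the bold sans-serif substitution chain but keeps an
// extra-heavy CSS weight of 900.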
it("should substitute Arial Black", () => {
|
||||
const fontName = "Arial Black";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
undefined,
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: false,
|
||||
baseFontName: "ArialBlack",
|
||||
src:
|
||||
"local(Arial Black),local(Helvetica Bold)," +
|
||||
"local(Helvetica Neue Bold),local(Arial Bold)," +
|
||||
"local(Arial Nova Bold),local(Liberation Sans Bold)," +
|
||||
"local(Arimo Bold),local(Nimbus Sans Bold)," +
|
||||
"local(Nimbus Sans L Bold),local(A030 Bold)," +
|
||||
"local(TeX Gyre Heros Bold),local(FreeSans Bold)," +
|
||||
"local(DejaVu Sans Bold),local(Albany Bold)," +
|
||||
"local(Bitstream Vera Sans Bold),local(Arial Unicode MS Bold)," +
|
||||
"local(Microsoft Sans Serif Bold),local(Apple Symbols Bold)," +
|
||||
"local(Cantarell Bold),url(/tmp/LiberationSans-Bold.ttf)",
|
||||
style: {
|
||||
style: "normal",
|
||||
weight: "900",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(
|
||||
/^"ArialBlack",g_d(\d+)_sf(\d+),sans-serif$/
|
||||
);
|
||||
});
|
||||
|
||||
it("should substitute Arial Black Bold", () => {
|
||||
const fontName = "Arial-Black-Bold";
|
||||
const fontSubstitution = getFontSubstitution(
|
||||
new Map(),
|
||||
idFactory,
|
||||
localFontPath,
|
||||
fontName,
|
||||
undefined,
|
||||
"TrueType"
|
||||
);
|
||||
expect(fontSubstitution).toEqual(
|
||||
jasmine.objectContaining({
|
||||
guessFallback: false,
|
||||
baseFontName: "ArialBlack-Bold",
|
||||
src:
|
||||
"local(Arial Black),local(Helvetica Bold)," +
|
||||
"local(Helvetica Neue Bold),local(Arial Bold)," +
|
||||
"local(Arial Nova Bold),local(Liberation Sans Bold)," +
|
||||
"local(Arimo Bold),local(Nimbus Sans Bold)," +
|
||||
"local(Nimbus Sans L Bold),local(A030 Bold)," +
|
||||
"local(TeX Gyre Heros Bold),local(FreeSans Bold)," +
|
||||
"local(DejaVu Sans Bold),local(Albany Bold)," +
|
||||
"local(Bitstream Vera Sans Bold),local(Arial Unicode MS Bold)," +
|
||||
"local(Microsoft Sans Serif Bold),local(Apple Symbols Bold)," +
|
||||
"local(Cantarell Bold),url(/tmp/LiberationSans-Bold.ttf)",
|
||||
style: {
|
||||
style: "normal",
|
||||
weight: "900",
|
||||
},
|
||||
})
|
||||
);
|
||||
expect(fontSubstitution.css).toMatch(
|
||||
/^"ArialBlack",g_d(\d+)_sf(\d+),sans-serif$/
|
||||
);
|
||||
});
|
||||
});
|
||||
583
test/unit/function_spec.js
Normal file
@@ -0,0 +1,583 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
PostScriptCompiler,
|
||||
PostScriptEvaluator,
|
||||
} from "../../src/core/function.js";
|
||||
import { PostScriptLexer, PostScriptParser } from "../../src/core/ps_parser.js";
|
||||
import { StringStream } from "../../src/core/stream.js";
|
||||
|
||||
describe("function", function () {
|
||||
describe("PostScriptParser", function () {
|
||||
function parse(program) {
|
||||
const stream = new StringStream(program);
|
||||
const parser = new PostScriptParser(new PostScriptLexer(stream));
|
||||
return parser.parse();
|
||||
}
|
||||
it("parses empty programs", function () {
|
||||
const output = parse("{}");
|
||||
expect(output.length).toEqual(0);
|
||||
});
|
||||
it("parses positive numbers", function () {
|
||||
const number = 999;
|
||||
const program = parse("{ " + number + " }");
|
||||
const expectedProgram = [number];
|
||||
expect(program).toEqual(expectedProgram);
|
||||
});
|
||||
it("parses negative numbers", function () {
|
||||
const number = -999;
|
||||
const program = parse("{ " + number + " }");
|
||||
const expectedProgram = [number];
|
||||
expect(program).toEqual(expectedProgram);
|
||||
});
|
||||
it("parses negative floats", function () {
|
||||
const number = 3.3;
|
||||
const program = parse("{ " + number + " }");
|
||||
const expectedProgram = [number];
|
||||
expect(program).toEqual(expectedProgram);
|
||||
});
|
||||
it("parses operators", function () {
|
||||
const program = parse("{ sub }");
|
||||
const expectedProgram = ["sub"];
|
||||
expect(program).toEqual(expectedProgram);
|
||||
});
|
||||
it("parses if statements", function () {
|
||||
const program = parse("{ { 99 } if }");
|
||||
const expectedProgram = [3, "jz", 99];
|
||||
expect(program).toEqual(expectedProgram);
|
||||
});
|
||||
it("parses ifelse statements", function () {
|
||||
const program = parse("{ { 99 } { 44 } ifelse }");
|
||||
const expectedProgram = [5, "jz", 99, 6, "j", 44];
|
||||
expect(program).toEqual(expectedProgram);
|
||||
});
|
||||
it("handles missing brackets", function () {
|
||||
expect(function () {
|
||||
parse("{");
|
||||
}).toThrow(new Error("Unexpected symbol: found undefined expected 1."));
|
||||
});
|
||||
it("handles junk after the end", function () {
|
||||
const number = 3.3;
|
||||
const program = parse("{ " + number + " }#");
|
||||
const expectedProgram = [number];
|
||||
expect(program).toEqual(expectedProgram);
|
||||
});
|
||||
});
|
||||
|
||||
describe("PostScriptEvaluator", function () {
|
||||
function evaluate(program) {
|
||||
const stream = new StringStream(program);
|
||||
const parser = new PostScriptParser(new PostScriptLexer(stream));
|
||||
const code = parser.parse();
|
||||
const evaluator = new PostScriptEvaluator(code);
|
||||
const output = evaluator.execute();
|
||||
return output;
|
||||
}
|
||||
|
||||
it("pushes stack", function () {
|
||||
const stack = evaluate("{ 99 }");
|
||||
const expectedStack = [99];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("handles if with true", function () {
|
||||
const stack = evaluate("{ 1 {99} if }");
|
||||
const expectedStack = [99];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("handles if with false", function () {
|
||||
const stack = evaluate("{ 0 {99} if }");
|
||||
const expectedStack = [];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("handles ifelse with true", function () {
|
||||
const stack = evaluate("{ 1 {99} {77} ifelse }");
|
||||
const expectedStack = [99];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("handles ifelse with false", function () {
|
||||
const stack = evaluate("{ 0 {99} {77} ifelse }");
|
||||
const expectedStack = [77];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("handles nested if", function () {
|
||||
const stack = evaluate("{ 1 {1 {77} if} if }");
|
||||
const expectedStack = [77];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
|
||||
it("abs", function () {
|
||||
const stack = evaluate("{ -2 abs }");
|
||||
const expectedStack = [2];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("adds", function () {
|
||||
const stack = evaluate("{ 1 2 add }");
|
||||
const expectedStack = [3];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("boolean and", function () {
|
||||
const stack = evaluate("{ true false and }");
|
||||
const expectedStack = [false];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("bitwise and", function () {
|
||||
const stack = evaluate("{ 254 1 and }");
|
||||
const expectedStack = [254 & 1];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("the angle in degrees (0-360) whose tangent is num/den.", function () {
|
||||
const stack = evaluate("{ 1 -1 atan }");
|
||||
const expectedStack = [135];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
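// 50 2 bitshift shifts left by two bits: 50 << 2 === 200.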
it("handles bitshifting ", function () {
|
||||
const stack = evaluate("{ 50 2 bitshift }");
|
||||
const expectedStack = [200];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("calculates the ceiling value", function () {
|
||||
const stack = evaluate("{ 9.9 ceiling }");
|
||||
const expectedStack = [10];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("copies", function () {
|
||||
const stack = evaluate("{ 99 98 2 copy }");
|
||||
const expectedStack = [99, 98, 99, 98];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("calculates the cosine of an angle in degrees", function () {
|
||||
const stack = evaluate("{ 180 cos }");
|
||||
const expectedStack = [-1];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("converts to int", function () {
|
||||
const stack = evaluate("{ 9.9 cvi }");
|
||||
const expectedStack = [9];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("converts negatives to int", function () {
|
||||
const stack = evaluate("{ -9.9 cvi }");
|
||||
const expectedStack = [-9];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("converts to real", function () {
|
||||
const stack = evaluate("{ 55.34 cvr }");
|
||||
const expectedStack = [55.34];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("divides", function () {
|
||||
const stack = evaluate("{ 6 5 div }");
|
||||
const expectedStack = [1.2];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("maps division by zero to infinity", function () {
|
||||
const stack = evaluate("{ 6 0 div }");
|
||||
const expectedStack = [Infinity];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("duplicates", function () {
|
||||
const stack = evaluate("{ 99 dup }");
|
||||
const expectedStack = [99, 99];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("accepts an equality", function () {
|
||||
const stack = evaluate("{ 9 9 eq }");
|
||||
const expectedStack = [true];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("rejects an inequality", function () {
|
||||
const stack = evaluate("{ 9 8 eq }");
|
||||
const expectedStack = [false];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("exchanges", function () {
|
||||
const stack = evaluate("{ 44 99 exch }");
|
||||
const expectedStack = [99, 44];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("handles exponentiation", function () {
|
||||
const stack = evaluate("{ 10 2 exp }");
|
||||
const expectedStack = [100];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("pushes false onto the stack", function () {
|
||||
const stack = evaluate("{ false }");
|
||||
const expectedStack = [false];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("calculates the floor value", function () {
|
||||
const stack = evaluate("{ 9.9 floor }");
|
||||
const expectedStack = [9];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("handles greater than or equal to", function () {
|
||||
const stack = evaluate("{ 10 9 ge }");
|
||||
const expectedStack = [true];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("rejects less than for greater than or equal to", function () {
|
||||
const stack = evaluate("{ 8 9 ge }");
|
||||
const expectedStack = [false];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("handles greater than", function () {
|
||||
const stack = evaluate("{ 10 9 gt }");
|
||||
const expectedStack = [true];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("rejects less than or equal for greater than", function () {
|
||||
const stack = evaluate("{ 9 9 gt }");
|
||||
const expectedStack = [false];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("divides to integer", function () {
|
||||
const stack = evaluate("{ 2 3 idiv }");
|
||||
const expectedStack = [0];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("divides to negative integer", function () {
|
||||
const stack = evaluate("{ -2 3 idiv }");
|
||||
const expectedStack = [0];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("duplicates index", function () {
|
||||
const stack = evaluate("{ 4 3 2 1 2 index }");
|
||||
const expectedStack = [4, 3, 2, 1, 3];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("handles less than or equal to", function () {
|
||||
const stack = evaluate("{ 9 10 le }");
|
||||
const expectedStack = [true];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("rejects greater than for less than or equal to", function () {
|
||||
const stack = evaluate("{ 10 9 le }");
|
||||
const expectedStack = [false];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("calculates the natural logarithm", function () {
|
||||
const stack = evaluate("{ 10 ln }");
|
||||
const expectedStack = [Math.log(10)];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("calculates the base 10 logarithm", function () {
|
||||
const stack = evaluate("{ 100 log }");
|
||||
const expectedStack = [2];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("handles less than", function () {
|
||||
const stack = evaluate("{ 9 10 lt }");
|
||||
const expectedStack = [true];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("rejects greater than or equal to for less than", function () {
|
||||
const stack = evaluate("{ 10 9 lt }");
|
||||
const expectedStack = [false];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("performs the modulo operation", function () {
|
||||
const stack = evaluate("{ 4 3 mod }");
|
||||
const expectedStack = [1];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("multiplies two numbers (positive result)", function () {
|
||||
const stack = evaluate("{ 9 8 mul }");
|
||||
const expectedStack = [72];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("multiplies two numbers (negative result)", function () {
|
||||
const stack = evaluate("{ 9 -8 mul }");
|
||||
const expectedStack = [-72];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("accepts an inequality", function () {
|
||||
const stack = evaluate("{ 9 8 ne }");
|
||||
const expectedStack = [true];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("rejects an equality", function () {
|
||||
const stack = evaluate("{ 9 9 ne }");
|
||||
const expectedStack = [false];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("negates", function () {
|
||||
const stack = evaluate("{ 4.5 neg }");
|
||||
const expectedStack = [-4.5];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("boolean not", function () {
|
||||
const stack = evaluate("{ true not }");
|
||||
const expectedStack = [false];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("bitwise not", function () {
|
||||
const stack = evaluate("{ 12 not }");
|
||||
const expectedStack = [-13];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("boolean or", function () {
|
||||
const stack = evaluate("{ true false or }");
|
||||
const expectedStack = [true];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("bitwise or", function () {
|
||||
const stack = evaluate("{ 254 1 or }");
|
||||
const expectedStack = [254 | 1];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("pops stack", function () {
|
||||
const stack = evaluate("{ 1 2 pop }");
|
||||
const expectedStack = [1];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
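// roll performs a circular shift of the top n elements by j positions: with
// [1, 3, 2, 2] on the stack, `4 1 roll` wraps the top element around to the
// bottom of the rolled group, giving [2, 1, 3, 2]; a negative j (next spec)
// rolls the other way.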
it("rolls stack right", function () {
|
||||
const stack = evaluate("{ 1 3 2 2 4 1 roll }");
|
||||
const expectedStack = [2, 1, 3, 2];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("rolls stack left", function () {
|
||||
const stack = evaluate("{ 1 3 2 2 4 -1 roll }");
|
||||
const expectedStack = [3, 2, 2, 1];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("rounds a number", function () {
|
||||
const stack = evaluate("{ 9.52 round }");
|
||||
const expectedStack = [10];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("calculates the sine of an angle in degrees", function () {
|
||||
const stack = evaluate("{ 90 sin }");
|
||||
const expectedStack = [1];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("calculates a square root (integer)", function () {
|
||||
const stack = evaluate("{ 100 sqrt }");
|
||||
const expectedStack = [10];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("calculates a square root (float)", function () {
|
||||
const stack = evaluate("{ 99 sqrt }");
|
||||
const expectedStack = [Math.sqrt(99)];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("subtracts (positive result)", function () {
|
||||
const stack = evaluate("{ 6 4 sub }");
|
||||
const expectedStack = [2];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("subtracts (negative result)", function () {
|
||||
const stack = evaluate("{ 4 6 sub }");
|
||||
const expectedStack = [-2];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("pushes true onto the stack", function () {
|
||||
const stack = evaluate("{ true }");
|
||||
const expectedStack = [true];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("truncates a number", function () {
|
||||
const stack = evaluate("{ 35.004 truncate }");
|
||||
const expectedStack = [35];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
it("calculates an exclusive or value", function () {
|
||||
const stack = evaluate("{ 3 9 xor }");
|
||||
const expectedStack = [10];
|
||||
expect(stack).toEqual(expectedStack);
|
||||
});
|
||||
});
|
||||
|
||||
describe("PostScriptCompiler", function () {
|
||||
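// Helper: compiles a PostScript program for the given domain/range and, when
// samples are provided, runs the generated code on each input/output pair;
// samples === null asserts that the compiler refuses the program.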
function check(code, domain, range, samples) {
|
||||
const compiler = new PostScriptCompiler();
|
||||
const compiledCode = compiler.compile(code, domain, range);
|
||||
if (samples === null) {
|
||||
expect(compiledCode).toBeNull();
|
||||
} else {
|
||||
expect(compiledCode).not.toBeNull();
|
||||
// eslint-disable-next-line no-new-func
|
||||
const fn = new Function(
|
||||
"src",
|
||||
"srcOffset",
|
||||
"dest",
|
||||
"destOffset",
|
||||
compiledCode
|
||||
);
|
||||
for (const { input, output } of samples) {
|
||||
const out = new Float32Array(output.length);
|
||||
fn(input, 0, out, 0);
|
||||
expect(Array.from(out)).toEqual(output);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
it("check compiled add", function () {
|
||||
check([0.25, 0.5, "add"], [], [0, 1], [{ input: [], output: [0.75] }]);
|
||||
check([0, "add"], [0, 1], [0, 1], [{ input: [0.25], output: [0.25] }]);
|
||||
check([0.5, "add"], [0, 1], [0, 1], [{ input: [0.25], output: [0.75] }]);
|
||||
check(
|
||||
[0, "exch", "add"],
|
||||
[0, 1],
|
||||
[0, 1],
|
||||
[{ input: [0.25], output: [0.25] }]
|
||||
);
|
||||
check(
|
||||
[0.5, "exch", "add"],
|
||||
[0, 1],
|
||||
[0, 1],
|
||||
[{ input: [0.25], output: [0.75] }]
|
||||
);
|
||||
check(
|
||||
["add"],
|
||||
[0, 1, 0, 1],
|
||||
[0, 1],
|
||||
[{ input: [0.25, 0.5], output: [0.75] }]
|
||||
);
|
||||
check(["add"], [0, 1], [0, 1], null);
|
||||
});
|
||||
it("check compiled sub", function () {
|
||||
check([0.5, 0.25, "sub"], [], [0, 1], [{ input: [], output: [0.25] }]);
|
||||
check([0, "sub"], [0, 1], [0, 1], [{ input: [0.25], output: [0.25] }]);
|
||||
check([0.5, "sub"], [0, 1], [0, 1], [{ input: [0.75], output: [0.25] }]);
|
||||
check(
|
||||
[0, "exch", "sub"],
|
||||
[0, 1],
|
||||
[-1, 1],
|
||||
[{ input: [0.25], output: [-0.25] }]
|
||||
);
|
||||
check(
|
||||
[0.75, "exch", "sub"],
|
||||
[0, 1],
|
||||
[-1, 1],
|
||||
[{ input: [0.25], output: [0.5] }]
|
||||
);
|
||||
check(
|
||||
["sub"],
|
||||
[0, 1, 0, 1],
|
||||
[-1, 1],
|
||||
[{ input: [0.25, 0.5], output: [-0.25] }]
|
||||
);
|
||||
check(["sub"], [0, 1], [0, 1], null);
|
||||
|
||||
check(
|
||||
[1, "dup", 3, 2, "roll", "sub", "sub"],
|
||||
[0, 1],
|
||||
[0, 1],
|
||||
[{ input: [0.75], output: [0.75] }]
|
||||
);
|
||||
});
|
||||
it("check compiled mul", function () {
|
||||
check([0.25, 0.5, "mul"], [], [0, 1], [{ input: [], output: [0.125] }]);
|
||||
check([0, "mul"], [0, 1], [0, 1], [{ input: [0.25], output: [0] }]);
|
||||
check([0.5, "mul"], [0, 1], [0, 1], [{ input: [0.25], output: [0.125] }]);
|
||||
check([1, "mul"], [0, 1], [0, 1], [{ input: [0.25], output: [0.25] }]);
|
||||
check(
|
||||
[0, "exch", "mul"],
|
||||
[0, 1],
|
||||
[0, 1],
|
||||
[{ input: [0.25], output: [0] }]
|
||||
);
|
||||
check(
|
||||
[0.5, "exch", "mul"],
|
||||
[0, 1],
|
||||
[0, 1],
|
||||
[{ input: [0.25], output: [0.125] }]
|
||||
);
|
||||
check(
|
||||
[1, "exch", "mul"],
|
||||
[0, 1],
|
||||
[0, 1],
|
||||
[{ input: [0.25], output: [0.25] }]
|
||||
);
|
||||
check(
|
||||
["mul"],
|
||||
[0, 1, 0, 1],
|
||||
[0, 1],
|
||||
[{ input: [0.25, 0.5], output: [0.125] }]
|
||||
);
|
||||
check(["mul"], [0, 1], [0, 1], null);
|
||||
});
|
||||
it("check compiled max", function () {
|
||||
check(
|
||||
["dup", 0.75, "gt", 7, "jz", "pop", 0.75],
|
||||
[0, 1],
|
||||
[0, 1],
|
||||
[{ input: [0.5], output: [0.5] }]
|
||||
);
|
||||
check(
|
||||
["dup", 0.75, "gt", 7, "jz", "pop", 0.75],
|
||||
[0, 1],
|
||||
[0, 1],
|
||||
[{ input: [1], output: [0.75] }]
|
||||
);
|
||||
check(["dup", 0.75, "gt", 5, "jz", "pop", 0.75], [0, 1], [0, 1], null);
|
||||
});
|
||||
it("check pop/roll/index", function () {
|
||||
check([1, "pop"], [0, 1], [0, 1], [{ input: [0.5], output: [0.5] }]);
|
||||
check(
|
||||
[1, 3, -1, "roll"],
|
||||
[0, 1, 0, 1],
|
||||
[0, 1, 0, 1, 0, 1],
|
||||
[{ input: [0.25, 0.5], output: [0.5, 1, 0.25] }]
|
||||
);
|
||||
check(
|
||||
[1, 3, 1, "roll"],
|
||||
[0, 1, 0, 1],
|
||||
[0, 1, 0, 1, 0, 1],
|
||||
[{ input: [0.25, 0.5], output: [1, 0.25, 0.5] }]
|
||||
);
|
||||
check([1, 3, 1.5, "roll"], [0, 1, 0, 1], [0, 1, 0, 1, 0, 1], null);
|
||||
check(
|
||||
[1, 1, "index"],
|
||||
[0, 1],
|
||||
[0, 1, 0, 1, 0, 1],
|
||||
[{ input: [0.5], output: [0.5, 1, 0.5] }]
|
||||
);
|
||||
check([1, 3, "index", "pop"], [0, 1], [0, 1], null);
|
||||
check([1, 0.5, "index", "pop"], [0, 1], [0, 1], null);
|
||||
});
|
||||
it("check input boundaries", function () {
|
||||
check([], [0, 0.5], [0, 1], [{ input: [1], output: [0.5] }]);
|
||||
check([], [0.5, 1], [0, 1], [{ input: [0], output: [0.5] }]);
|
||||
check(
|
||||
["dup"],
|
||||
[0.5, 0.75],
|
||||
[0, 1, 0, 1],
|
||||
[{ input: [0], output: [0.5, 0.5] }]
|
||||
);
|
||||
check([], [100, 1001], [0, 10000], [{ input: [1000], output: [1000] }]);
|
||||
});
|
||||
it("check output boundaries", function () {
|
||||
check([], [0, 1], [0, 0.5], [{ input: [1], output: [0.5] }]);
|
||||
check([], [0, 1], [0.5, 1], [{ input: [0], output: [0.5] }]);
|
||||
check(
|
||||
["dup"],
|
||||
[0, 1],
|
||||
[0.5, 1, 0.75, 1],
|
||||
[{ input: [0], output: [0.5, 0.75] }]
|
||||
);
|
||||
check([], [0, 10000], [100, 1001], [{ input: [1000], output: [1000] }]);
|
||||
});
|
||||
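// The sequence below is arithmetically a no-op on the input, so the compiler is
// expected to reduce it to a single clamped copy of the source value.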
it("compile optimized", function () {
|
||||
const compiler = new PostScriptCompiler();
|
||||
const code = [0, "add", 1, 1, 3, -1, "roll", "sub", "sub", 1, "mul"];
|
||||
const compiledCode = compiler.compile(code, [0, 1], [0, 1]);
|
||||
expect(compiledCode).toEqual(
|
||||
"dest[destOffset + 0] = Math.max(0, Math.min(1, src[srcOffset + 0]));"
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
218
test/unit/jasmine-boot.js
Normal file
@@ -0,0 +1,218 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
/*
|
||||
Copyright (c) 2008-2016 Pivotal Labs
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
/* globals jasmineRequire */
|
||||
|
||||
// Modified jasmine's boot.js file to load PDF.js libraries async.
|
||||
|
||||
"use strict";
|
||||
|
||||
import { GlobalWorkerOptions } from "pdfjs/display/worker_options.js";
|
||||
import { isNodeJS } from "../../src/shared/util.js";
|
||||
import { TestReporter } from "../reporter.js";
|
||||
|
||||
async function initializePDFJS(callback) {
|
||||
await Promise.all(
|
||||
[
|
||||
"pdfjs-test/unit/annotation_spec.js",
|
||||
"pdfjs-test/unit/annotation_storage_spec.js",
|
||||
"pdfjs-test/unit/api_spec.js",
|
||||
"pdfjs-test/unit/app_options_spec.js",
|
||||
"pdfjs-test/unit/autolinker_spec.js",
|
||||
"pdfjs-test/unit/bidi_spec.js",
|
||||
"pdfjs-test/unit/bin_font_info_spec.js",
|
||||
"pdfjs-test/unit/canvas_factory_spec.js",
|
||||
"pdfjs-test/unit/cff_parser_spec.js",
|
||||
"pdfjs-test/unit/cmap_spec.js",
|
||||
"pdfjs-test/unit/colorspace_spec.js",
|
||||
"pdfjs-test/unit/core_utils_spec.js",
|
||||
"pdfjs-test/unit/crypto_spec.js",
|
||||
"pdfjs-test/unit/custom_spec.js",
|
||||
"pdfjs-test/unit/default_appearance_spec.js",
|
||||
"pdfjs-test/unit/display_utils_spec.js",
|
||||
"pdfjs-test/unit/document_spec.js",
|
||||
"pdfjs-test/unit/editor_spec.js",
|
||||
"pdfjs-test/unit/encodings_spec.js",
|
||||
"pdfjs-test/unit/evaluator_spec.js",
|
||||
"pdfjs-test/unit/event_utils_spec.js",
|
||||
"pdfjs-test/unit/fetch_stream_spec.js",
|
||||
"pdfjs-test/unit/font_substitutions_spec.js",
|
||||
"pdfjs-test/unit/function_spec.js",
|
||||
"pdfjs-test/unit/message_handler_spec.js",
|
||||
"pdfjs-test/unit/metadata_spec.js",
|
||||
"pdfjs-test/unit/murmurhash3_spec.js",
|
||||
"pdfjs-test/unit/network_spec.js",
|
||||
"pdfjs-test/unit/network_utils_spec.js",
|
||||
"pdfjs-test/unit/parser_spec.js",
|
||||
"pdfjs-test/unit/pdf.image_decoders_spec.js",
|
||||
"pdfjs-test/unit/pdf.worker_spec.js",
|
||||
"pdfjs-test/unit/pdf_find_controller_spec.js",
|
||||
"pdfjs-test/unit/pdf_find_utils_spec.js",
|
||||
"pdfjs-test/unit/pdf_history_spec.js",
|
||||
"pdfjs-test/unit/pdf_spec.js",
|
||||
"pdfjs-test/unit/pdf_viewer.component_spec.js",
|
||||
"pdfjs-test/unit/pdf_viewer_spec.js",
|
||||
"pdfjs-test/unit/primitives_spec.js",
|
||||
"pdfjs-test/unit/scripting_spec.js",
|
||||
"pdfjs-test/unit/stream_spec.js",
|
||||
"pdfjs-test/unit/struct_tree_spec.js",
|
||||
"pdfjs-test/unit/svg_factory_spec.js",
|
||||
"pdfjs-test/unit/text_layer_spec.js",
|
||||
"pdfjs-test/unit/type1_parser_spec.js",
|
||||
"pdfjs-test/unit/ui_utils_spec.js",
|
||||
"pdfjs-test/unit/unicode_spec.js",
|
||||
"pdfjs-test/unit/util_spec.js",
|
||||
"pdfjs-test/unit/writer_spec.js",
|
||||
"pdfjs-test/unit/xfa_formcalc_spec.js",
|
||||
"pdfjs-test/unit/xfa_parser_spec.js",
|
||||
"pdfjs-test/unit/xfa_serialize_data_spec.js",
|
||||
"pdfjs-test/unit/xfa_tohtml_spec.js",
|
||||
"pdfjs-test/unit/xml_spec.js",
|
||||
].map(moduleName => import(moduleName)) // eslint-disable-line no-unsanitized/method
|
||||
);
|
||||
|
||||
if (isNodeJS) {
|
||||
throw new Error(
|
||||
"The `gulp unittest` command cannot be used in Node.js environments."
|
||||
);
|
||||
}
|
||||
// Configure the worker.
|
||||
GlobalWorkerOptions.workerSrc = "../../build/generic/build/pdf.worker.mjs";
|
||||
|
||||
callback();
|
||||
}
|
||||
|
||||
(function () {
|
||||
window.jasmine = jasmineRequire.core(jasmineRequire);
|
||||
|
||||
jasmineRequire.html(jasmine);
|
||||
|
||||
const env = jasmine.getEnv();
|
||||
|
||||
const jasmineInterface = jasmineRequire.interface(jasmine, env);
|
||||
extend(window, jasmineInterface);
|
||||
|
||||
// Runner Parameters
|
||||
const queryString = new jasmine.QueryString({
|
||||
getWindowLocation() {
|
||||
return window.location;
|
||||
},
|
||||
});
|
||||
|
||||
const config = {
|
||||
failFast: queryString.getParam("failFast"),
|
||||
oneFailurePerSpec: queryString.getParam("oneFailurePerSpec"),
|
||||
hideDisabled: queryString.getParam("hideDisabled"),
|
||||
};
|
||||
|
||||
const random = queryString.getParam("random");
|
||||
if (random !== undefined && random !== "") {
|
||||
config.random = random;
|
||||
}
|
||||
|
||||
const seed = queryString.getParam("seed");
|
||||
if (seed) {
|
||||
config.seed = seed;
|
||||
}
|
||||
|
||||
// Reporters
|
||||
const htmlReporter = new jasmine.HtmlReporter({
|
||||
env,
|
||||
navigateWithNewParam(key, value) {
|
||||
return queryString.navigateWithNewParam(key, value);
|
||||
},
|
||||
addToExistingQueryString(key, value) {
|
||||
return queryString.fullStringWithNewParam(key, value);
|
||||
},
|
||||
getContainer() {
|
||||
return document.body;
|
||||
},
|
||||
createElement() {
|
||||
return document.createElement(...arguments);
|
||||
},
|
||||
createTextNode() {
|
||||
return document.createTextNode(...arguments);
|
||||
},
|
||||
timer: new jasmine.Timer(),
|
||||
});
|
||||
|
||||
env.addReporter(htmlReporter);
|
||||
|
||||
if (queryString.getParam("browser")) {
|
||||
const testReporter = new TestReporter(queryString.getParam("browser"));
|
||||
env.addReporter(testReporter);
|
||||
}
|
||||
|
||||
// Filter which specs will be run by matching the start of the full name
|
||||
// against the `spec` query param.
|
||||
const specFilter = new jasmine.HtmlSpecFilter({
|
||||
filterString() {
|
||||
return queryString.getParam("spec");
|
||||
},
|
||||
});
|
||||
|
||||
config.specFilter = function (spec) {
|
||||
return specFilter.matches(spec.getFullName());
|
||||
};
|
||||
|
||||
env.configure(config);
|
||||
|
||||
// Sets longer timeout.
|
||||
jasmine.DEFAULT_TIMEOUT_INTERVAL = 30000;
|
||||
|
||||
function extend(destination, source) {
|
||||
for (const property in source) {
|
||||
destination[property] = source[property];
|
||||
}
|
||||
return destination;
|
||||
}
|
||||
|
||||
function unitTestInit() {
|
||||
initializePDFJS(function () {
|
||||
htmlReporter.initialize();
|
||||
env.execute();
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
document.readyState === "interactive" ||
|
||||
document.readyState === "complete"
|
||||
) {
|
||||
unitTestInit();
|
||||
} else {
|
||||
document.addEventListener("DOMContentLoaded", unitTestInit, true);
|
||||
}
|
||||
})();
|
||||
388
test/unit/message_handler_spec.js
Normal file
@@ -0,0 +1,388 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
AbortException,
|
||||
UnknownErrorException,
|
||||
} from "../../src/shared/util.js";
|
||||
import { LoopbackPort } from "../../src/display/api_utils.js";
|
||||
import { MessageHandler } from "../../src/shared/message_handler.js";
|
||||
|
||||
describe("message_handler", function () {
|
||||
// Sleep function to wait for some time, similar to setTimeout but faster.
|
||||
function sleep(ticks) {
|
||||
return Promise.resolve().then(() => ticks && sleep(ticks - 1));
|
||||
}
|
||||
|
||||
describe("sendWithStream", function () {
|
||||
it("should return a ReadableStream", function () {
|
||||
const port = new LoopbackPort();
|
||||
const messageHandler1 = new MessageHandler("main", "worker", port);
|
||||
const readable = messageHandler1.sendWithStream("fakeHandler");
|
||||
// Check if readable is an instance of ReadableStream.
|
||||
expect(typeof readable).toEqual("object");
|
||||
expect(typeof readable.getReader).toEqual("function");
|
||||
});
|
||||
|
||||
it("should read using a reader", async function () {
|
||||
let log = "";
|
||||
const port = new LoopbackPort();
|
||||
const messageHandler1 = new MessageHandler("main", "worker", port);
|
||||
const messageHandler2 = new MessageHandler("worker", "main", port);
|
||||
messageHandler2.on("fakeHandler", (data, sink) => {
|
||||
sink.onPull = function () {
|
||||
log += "p";
|
||||
};
|
||||
sink.onCancel = function (reason) {
|
||||
log += "c";
|
||||
};
|
||||
sink.ready
|
||||
.then(() => {
|
||||
sink.enqueue("hi");
|
||||
return sink.ready;
|
||||
})
|
||||
.then(() => {
|
||||
sink.close();
|
||||
});
|
||||
return sleep(5);
|
||||
});
|
||||
const readable = messageHandler1.sendWithStream(
|
||||
"fakeHandler",
|
||||
{},
|
||||
{
|
||||
highWaterMark: 1,
|
||||
size() {
|
||||
return 1;
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const reader = readable.getReader();
|
||||
await sleep(10);
|
||||
expect(log).toEqual("");
|
||||
|
||||
let result = await reader.read();
|
||||
expect(log).toEqual("p");
|
||||
expect(result.value).toEqual("hi");
|
||||
expect(result.done).toEqual(false);
|
||||
|
||||
await sleep(10);
|
||||
result = await reader.read();
|
||||
expect(result.value).toEqual(undefined);
|
||||
expect(result.done).toEqual(true);
|
||||
});
|
||||
|
||||
it("should not read any data when cancelled", async function () {
|
||||
let log = "";
|
||||
const port = new LoopbackPort();
|
||||
const messageHandler2 = new MessageHandler("worker", "main", port);
|
||||
messageHandler2.on("fakeHandler", (data, sink) => {
|
||||
sink.onPull = function () {
|
||||
log += "p";
|
||||
};
|
||||
sink.onCancel = function (reason) {
|
||||
log += "c";
|
||||
};
|
||||
log += "0";
|
||||
sink.ready
|
||||
.then(() => {
|
||||
log += "1";
|
||||
sink.enqueue([1, 2, 3, 4], 4);
|
||||
return sink.ready;
|
||||
})
|
||||
.then(() => {
|
||||
log += "2";
|
||||
sink.enqueue([5, 6, 7, 8], 4);
|
||||
return sink.ready;
|
||||
})
|
||||
.then(
|
||||
() => {
|
||||
log += "3";
|
||||
sink.close();
|
||||
},
|
||||
() => {
|
||||
log += "4";
|
||||
}
|
||||
);
|
||||
});
|
||||
const messageHandler1 = new MessageHandler("main", "worker", port);
|
||||
const readable = messageHandler1.sendWithStream(
|
||||
"fakeHandler",
|
||||
{},
|
||||
{
|
||||
highWaterMark: 4,
|
||||
size(arr) {
|
||||
return arr.length;
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const reader = readable.getReader();
|
||||
await sleep(10);
|
||||
expect(log).toEqual("01");
|
||||
|
||||
const result = await reader.read();
|
||||
expect(result.value).toEqual([1, 2, 3, 4]);
|
||||
expect(result.done).toEqual(false);
|
||||
|
||||
await sleep(10);
|
||||
expect(log).toEqual("01p2");
|
||||
|
||||
await reader.cancel(new AbortException("reader cancelled."));
|
||||
expect(log).toEqual("01p2c4");
|
||||
});
|
||||
|
||||
it("should not read when errored", async function () {
|
||||
let log = "";
|
||||
const port = new LoopbackPort();
|
||||
const messageHandler2 = new MessageHandler("worker", "main", port);
|
||||
messageHandler2.on("fakeHandler", (data, sink) => {
|
||||
sink.onPull = function () {
|
||||
log += "p";
|
||||
};
|
||||
sink.onCancel = function (reason) {
|
||||
log += "c";
|
||||
};
|
||||
log += "0";
|
||||
sink.ready
|
||||
.then(() => {
|
||||
log += "1";
|
||||
sink.enqueue([1, 2, 3, 4], 4);
|
||||
return sink.ready;
|
||||
})
|
||||
.then(() => {
|
||||
log += "e";
|
||||
sink.error(new Error("should not read when errored"));
|
||||
});
|
||||
});
|
||||
const messageHandler1 = new MessageHandler("main", "worker", port);
|
||||
const readable = messageHandler1.sendWithStream(
|
||||
"fakeHandler",
|
||||
{},
|
||||
{
|
||||
highWaterMark: 4,
|
||||
size(arr) {
|
||||
return arr.length;
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const reader = readable.getReader();
|
||||
await sleep(10);
|
||||
expect(log).toEqual("01");
|
||||
|
||||
const result = await reader.read();
|
||||
expect(result.value).toEqual([1, 2, 3, 4]);
|
||||
expect(result.done).toEqual(false);
|
||||
|
||||
try {
|
||||
await reader.read();
|
||||
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
} catch (reason) {
|
||||
expect(log).toEqual("01pe");
|
||||
expect(reason instanceof UnknownErrorException).toEqual(true);
|
||||
expect(reason.message).toEqual("should not read when errored");
|
||||
}
|
||||
});
|
||||
|
||||
it("should read data with blocking promise", async function () {
|
||||
let log = "";
|
||||
const port = new LoopbackPort();
|
||||
const messageHandler2 = new MessageHandler("worker", "main", port);
|
||||
messageHandler2.on("fakeHandler", (data, sink) => {
|
||||
sink.onPull = function () {
|
||||
log += "p";
|
||||
};
|
||||
sink.onCancel = function (reason) {
|
||||
log += "c";
|
||||
};
|
||||
log += "0";
|
||||
sink.ready
|
||||
.then(() => {
|
||||
log += "1";
|
||||
sink.enqueue([1, 2, 3, 4], 4);
|
||||
return sink.ready;
|
||||
})
|
||||
.then(() => {
|
||||
log += "2";
|
||||
sink.enqueue([5, 6, 7, 8], 4);
|
||||
return sink.ready;
|
||||
})
|
||||
.then(() => {
|
||||
sink.close();
|
||||
});
|
||||
});
|
||||
|
||||
const messageHandler1 = new MessageHandler("main", "worker", port);
|
||||
const readable = messageHandler1.sendWithStream(
|
||||
"fakeHandler",
|
||||
{},
|
||||
{
|
||||
highWaterMark: 4,
|
||||
size(arr) {
|
||||
return arr.length;
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const reader = readable.getReader();
|
||||
// Sleep for 10ms, so that read() is not unblocking the ready promise.
|
||||
// Chain all read() to stream in sequence.
|
||||
await sleep(10);
|
||||
expect(log).toEqual("01");
|
||||
|
||||
let result = await reader.read();
|
||||
expect(result.value).toEqual([1, 2, 3, 4]);
|
||||
expect(result.done).toEqual(false);
|
||||
|
||||
await sleep(10);
|
||||
expect(log).toEqual("01p2");
|
||||
|
||||
result = await reader.read();
|
||||
expect(result.value).toEqual([5, 6, 7, 8]);
|
||||
expect(result.done).toEqual(false);
|
||||
|
||||
await sleep(10);
|
||||
expect(log).toEqual("01p2p");
|
||||
|
||||
result = await reader.read();
|
||||
expect(result.value).toEqual(undefined);
|
||||
expect(result.done).toEqual(true);
|
||||
});
|
||||
|
||||
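// With highWaterMark equal to the total enqueued size (8), both chunks fit in
// the stream's internal queue, so the sink can enqueue everything before the
// reader ever pulls.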
it(
|
||||
"should read data with blocking promise and buffer whole data" +
|
||||
" into stream",
|
||||
async function () {
|
||||
let log = "";
|
||||
const port = new LoopbackPort();
|
||||
const messageHandler2 = new MessageHandler("worker", "main", port);
|
||||
messageHandler2.on("fakeHandler", (data, sink) => {
|
||||
sink.onPull = function () {
|
||||
log += "p";
|
||||
};
|
||||
sink.onCancel = function (reason) {
|
||||
log += "c";
|
||||
};
|
||||
log += "0";
|
||||
sink.ready
|
||||
.then(() => {
|
||||
log += "1";
|
||||
sink.enqueue([1, 2, 3, 4], 4);
|
||||
return sink.ready;
|
||||
})
|
||||
.then(() => {
|
||||
log += "2";
|
||||
sink.enqueue([5, 6, 7, 8], 4);
|
||||
return sink.ready;
|
||||
})
|
||||
.then(() => {
|
||||
sink.close();
|
||||
});
|
||||
return sleep(10);
|
||||
});
|
||||
|
||||
const messageHandler1 = new MessageHandler("main", "worker", port);
|
||||
const readable = messageHandler1.sendWithStream(
|
||||
"fakeHandler",
|
||||
{},
|
||||
{
|
||||
highWaterMark: 8,
|
||||
size(arr) {
|
||||
return arr.length;
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const reader = readable.getReader();
|
||||
await sleep(10);
|
||||
expect(log).toEqual("012");
|
||||
|
||||
let result = await reader.read();
|
||||
expect(result.value).toEqual([1, 2, 3, 4]);
|
||||
expect(result.done).toEqual(false);
|
||||
|
||||
await sleep(10);
|
||||
expect(log).toEqual("012p");
|
||||
|
||||
result = await reader.read();
|
||||
expect(result.value).toEqual([5, 6, 7, 8]);
|
||||
expect(result.done).toEqual(false);
|
||||
|
||||
await sleep(10);
|
||||
expect(log).toEqual("012p");
|
||||
|
||||
result = await reader.read();
|
||||
expect(result.value).toEqual(undefined);
|
||||
expect(result.done).toEqual(true);
|
||||
}
|
||||
);
|
||||
|
||||
it("should ignore any pull after close is called", async function () {
|
||||
let log = "";
|
||||
const port = new LoopbackPort();
|
||||
const { promise, resolve } = Promise.withResolvers();
|
||||
const messageHandler2 = new MessageHandler("worker", "main", port);
|
||||
messageHandler2.on("fakeHandler", (data, sink) => {
|
||||
sink.onPull = function () {
|
||||
log += "p";
|
||||
};
|
||||
sink.onCancel = function (reason) {
|
||||
log += "c";
|
||||
};
|
||||
log += "0";
|
||||
sink.ready.then(() => {
|
||||
log += "1";
|
||||
sink.enqueue([1, 2, 3, 4], 4);
|
||||
});
|
||||
return promise.then(() => {
|
||||
sink.close();
|
||||
});
|
||||
});
|
||||
|
||||
const messageHandler1 = new MessageHandler("main", "worker", port);
|
||||
const readable = messageHandler1.sendWithStream(
|
||||
"fakeHandler",
|
||||
{},
|
||||
{
|
||||
highWaterMark: 10,
|
||||
size(arr) {
|
||||
return arr.length;
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const reader = readable.getReader();
|
||||
await sleep(10);
|
||||
expect(log).toEqual("01");
|
||||
|
||||
resolve();
|
||||
await promise;
|
||||
|
||||
let result = await reader.read();
|
||||
expect(result.value).toEqual([1, 2, 3, 4]);
|
||||
expect(result.done).toEqual(false);
|
||||
|
||||
await sleep(10);
|
||||
expect(log).toEqual("01");
|
||||
|
||||
result = await reader.read();
|
||||
expect(result.value).toEqual(undefined);
|
||||
expect(result.done).toEqual(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
249
test/unit/metadata_spec.js
Normal file
@@ -0,0 +1,249 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { Metadata } from "../../src/display/metadata.js";
|
||||
import { MetadataParser } from "../../src/core/metadata_parser.js";
|
||||
|
||||
function createMetadata(data) {
|
||||
const metadataParser = new MetadataParser(data);
|
||||
return new Metadata(metadataParser.serializable);
|
||||
}
|
||||
|
||||
describe("metadata", function () {
|
||||
it("should handle valid metadata", function () {
|
||||
const data =
|
||||
"<x:xmpmeta xmlns:x='adobe:ns:meta/'>" +
|
||||
"<rdf:RDF xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'>" +
|
||||
"<rdf:Description xmlns:dc='http://purl.org/dc/elements/1.1/'>" +
|
||||
'<dc:title><rdf:Alt><rdf:li xml:lang="x-default">Foo bar baz</rdf:li>' +
|
||||
"</rdf:Alt></dc:title></rdf:Description></rdf:RDF></x:xmpmeta>";
|
||||
const metadata = createMetadata(data);
|
||||
|
||||
expect(metadata.get("dc:title")).toEqual("Foo bar baz");
|
||||
expect(metadata.get("dc:qux")).toEqual(null);
|
||||
|
||||
expect([...metadata]).toEqual([["dc:title", "Foo bar baz"]]);
|
||||
});
|
||||
|
||||
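// \\376\\377 is a UTF-16 big-endian byte order mark written as octal escapes;
// the parser is expected to repair the escaping and decode the UTF-16 title.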
it("should repair and handle invalid metadata", function () {
|
||||
const data =
|
||||
"<x:xmpmeta xmlns:x='adobe:ns:meta/'>" +
|
||||
"<rdf:RDF xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'>" +
|
||||
"<rdf:Description xmlns:dc='http://purl.org/dc/elements/1.1/'>" +
|
||||
"<dc:title>\\376\\377\\000P\\000D\\000F\\000&</dc:title>" +
|
||||
"</rdf:Description></rdf:RDF></x:xmpmeta>";
|
||||
const metadata = createMetadata(data);
|
||||
|
||||
expect(metadata.get("dc:title")).toEqual("PDF&");
|
||||
expect(metadata.get("dc:qux")).toEqual(null);
|
||||
|
||||
expect([...metadata]).toEqual([["dc:title", "PDF&"]]);
|
||||
});
|
||||
|
||||
it("should repair and handle invalid metadata (bug 1424938)", function () {
|
||||
const data =
|
||||
"<x:xmpmeta xmlns:x='adobe:ns:meta/' " +
|
||||
"x:xmptk='XMP toolkit 2.9.1-13, framework 1.6'>" +
|
||||
"<rdf:RDF xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#' " +
|
||||
"xmlns:iX='http://ns.adobe.com/iX/1.0/'>" +
|
||||
"<rdf:Description rdf:about='61652fa7-fc1f-11dd-0000-ce81d41f9ecf' " +
|
||||
"xmlns:pdf='http://ns.adobe.com/pdf/1.3/' " +
|
||||
"pdf:Producer='GPL Ghostscript 8.63'/>" +
|
||||
"<rdf:Description rdf:about='61652fa7-fc1f-11dd-0000-ce81d41f9ecf' " +
|
||||
"xmlns:xap='http://ns.adobe.com/xap/1.0/' " +
|
||||
"xap:ModifyDate='2009-02-13T12:42:54+01:00' " +
|
||||
"xap:CreateDate='2009-02-13T12:42:54+01:00'>" +
|
||||
"<xap:CreatorTool>\\376\\377\\000P\\000D\\000F\\000C\\000r\\000e\\000a" +
|
||||
"\\000t\\000o\\000r\\000 \\000V\\000e\\000r\\000s\\000i\\000o\\000n" +
|
||||
"\\000 \\0000\\000.\\0009\\000.\\0006</xap:CreatorTool>" +
|
||||
"</rdf:Description><rdf:Description " +
|
||||
"rdf:about='61652fa7-fc1f-11dd-0000-ce81d41f9ecf' " +
|
||||
"xmlns:xapMM='http://ns.adobe.com/xap/1.0/mm/' " +
|
||||
"xapMM:DocumentID='61652fa7-fc1f-11dd-0000-ce81d41f9ecf'/>" +
|
||||
"<rdf:Description rdf:about='61652fa7-fc1f-11dd-0000-ce81d41f9ecf' " +
|
||||
"xmlns:dc='http://purl.org/dc/elements/1.1/' " +
|
||||
"dc:format='application/pdf'><dc:title><rdf:Alt>" +
|
||||
"<rdf:li xml:lang='x-default'>\\376\\377\\000L\\000'\\000O\\000d" +
|
||||
"\\000i\\000s\\000s\\000e\\000e\\000 \\000t\\000h\\000\\351\\000m\\000a" +
|
||||
"\\000t\\000i\\000q\\000u\\000e\\000 \\000l\\000o\\000g\\000o\\000 " +
|
||||
"\\000O\\000d\\000i\\000s\\000s\\000\\351\\000\\351\\000 \\000-\\000 " +
|
||||
"\\000d\\000\\351\\000c\\000e\\000m\\000b\\000r\\000e\\000 \\0002\\0000" +
|
||||
"\\0000\\0008\\000.\\000p\\000u\\000b</rdf:li></rdf:Alt></dc:title>" +
|
||||
"<dc:creator><rdf:Seq><rdf:li>\\376\\377\\000O\\000D\\000I\\000S" +
|
||||
"</rdf:li></rdf:Seq></dc:creator></rdf:Description></rdf:RDF>" +
|
||||
"</x:xmpmeta>";
|
||||
const metadata = createMetadata(data);
|
||||
|
||||
expect(metadata.get("dc:title")).toEqual(
|
||||
"L'Odissee thématique logo Odisséé - décembre 2008.pub"
|
||||
);
|
||||
expect(metadata.get("dc:qux")).toEqual(null);
|
||||
|
||||
expect([...metadata].sort()).toEqual([
|
||||
["dc:creator", ["ODIS"]],
|
||||
["dc:title", "L'Odissee thématique logo Odisséé - décembre 2008.pub"],
|
||||
["xap:creatortool", "PDFCreator Version 0.9.6"],
|
||||
]);
|
||||
});
|
||||
|
||||
it("should gracefully handle incomplete tags (issue 8884)", function () {
|
||||
const data =
|
||||
'<?xpacket begin="" id="W5M0MpCehiHzreSzNTczkc9d' +
|
||||
'<x:xmpmeta xmlns:x="adobe:ns:meta/">' +
|
||||
'<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">' +
|
||||
'<rdf:Description rdf:about=""' +
|
||||
'xmlns:pdfx="http://ns.adobe.com/pdfx/1.3/">' +
|
||||
"</rdf:Description>" +
|
||||
'<rdf:Description rdf:about=""' +
|
||||
'xmlns:xap="http://ns.adobe.com/xap/1.0/">' +
|
||||
"<xap:ModifyDate>2010-03-25T11:20:09-04:00</xap:ModifyDate>" +
|
||||
"<xap:CreateDate>2010-03-25T11:20:09-04:00</xap:CreateDate>" +
|
||||
"<xap:MetadataDate>2010-03-25T11:20:09-04:00</xap:MetadataDate>" +
|
||||
"</rdf:Description>" +
|
||||
'<rdf:Description rdf:about=""' +
|
||||
'xmlns:dc="http://purl.org/dc/elements/1.1/">' +
|
||||
"<dc:format>application/pdf</dc:format>" +
|
||||
"</rdf:Description>" +
|
||||
'<rdf:Description rdf:about=""' +
|
||||
'xmlns:pdfaid="http://www.aiim.org/pdfa/ns/id/">' +
|
||||
"<pdfaid:part>1</pdfaid:part>" +
|
||||
"<pdfaid:conformance>A</pdfaid:conformance>" +
|
||||
"</rdf:Description>" +
|
||||
"</rdf:RDF>" +
|
||||
"</x:xmpmeta>" +
|
||||
'<?xpacket end="w"?>';
|
||||
const metadata = createMetadata(data);
|
||||
|
||||
expect([...metadata]).toEqual([]);
|
||||
});
|
||||
|
||||
it('should gracefully handle "junk" before the actual metadata (issue 10395)', function () {
|
||||
const data =
|
||||
'<?xpacket begin="" id="W5M0MpCehiHzreSzNTczkc9d"?>' +
|
||||
'<x:xmpmeta x:xmptk="TallComponents PDFObjects 1.0" ' +
|
||||
'xmlns:x="adobe:ns:meta/">' +
|
||||
'<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">' +
|
||||
'<rdf:Description rdf:about="" ' +
|
||||
'xmlns:pdf="http://ns.adobe.com/pdf/1.3/">' +
|
||||
"<pdf:Producer>PDFKit.NET 4.0.102.0</pdf:Producer>" +
|
||||
"<pdf:Keywords></pdf:Keywords>" +
|
||||
"<pdf:PDFVersion>1.7</pdf:PDFVersion></rdf:Description>" +
|
||||
'<rdf:Description rdf:about="" ' +
|
||||
'xmlns:xap="http://ns.adobe.com/xap/1.0/">' +
|
||||
"<xap:CreateDate>2018-12-27T13:50:36-08:00</xap:CreateDate>" +
|
||||
"<xap:ModifyDate>2018-12-27T13:50:38-08:00</xap:ModifyDate>" +
|
||||
"<xap:CreatorTool></xap:CreatorTool>" +
|
||||
"<xap:MetadataDate>2018-12-27T13:50:38-08:00</xap:MetadataDate>" +
|
||||
'</rdf:Description><rdf:Description rdf:about="" ' +
|
||||
'xmlns:dc="http://purl.org/dc/elements/1.1/">' +
|
||||
"<dc:creator><rdf:Seq><rdf:li></rdf:li></rdf:Seq></dc:creator>" +
|
||||
"<dc:subject><rdf:Bag /></dc:subject>" +
|
||||
'<dc:description><rdf:Alt><rdf:li xml:lang="x-default">' +
|
||||
"</rdf:li></rdf:Alt></dc:description>" +
|
||||
'<dc:title><rdf:Alt><rdf:li xml:lang="x-default"></rdf:li>' +
|
||||
"</rdf:Alt></dc:title><dc:format>application/pdf</dc:format>" +
|
||||
'</rdf:Description></rdf:RDF></x:xmpmeta><?xpacket end="w"?>';
|
||||
const metadata = createMetadata(data);
|
||||
|
||||
expect(metadata.get("dc:title")).toEqual("");
|
||||
expect(metadata.get("dc:qux")).toEqual(null);
|
||||
|
||||
expect([...metadata].sort()).toEqual([
|
||||
["dc:creator", [""]],
|
||||
["dc:description", ""],
|
||||
["dc:format", "application/pdf"],
|
||||
["dc:subject", []],
|
||||
["dc:title", ""],
|
||||
["pdf:keywords", ""],
|
||||
["pdf:pdfversion", "1.7"],
|
||||
["pdf:producer", "PDFKit.NET 4.0.102.0"],
|
||||
["xap:createdate", "2018-12-27T13:50:36-08:00"],
|
||||
["xap:creatortool", ""],
|
||||
["xap:metadatadate", "2018-12-27T13:50:38-08:00"],
|
||||
["xap:modifydate", "2018-12-27T13:50:38-08:00"],
|
||||
]);
|
||||
});
|
||||
|
||||
it('should correctly handle metadata containing "&apos" (issue 10407)', function () {
|
||||
const data =
|
||||
"<x:xmpmeta xmlns:x='adobe:ns:meta/'>" +
|
||||
"<rdf:RDF xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'>" +
|
||||
"<rdf:Description xmlns:dc='http://purl.org/dc/elements/1.1/'>" +
|
||||
"<dc:title><rdf:Alt>" +
|
||||
      '<rdf:li xml:lang="x-default">&apos;Foo bar baz&apos;</rdf:li>' +
      "</rdf:Alt></dc:title></rdf:Description></rdf:RDF></x:xmpmeta>";
    const metadata = createMetadata(data);

    expect(metadata.get("dc:title")).toEqual("'Foo bar baz'");
    expect(metadata.get("dc:qux")).toEqual(null);

    expect([...metadata]).toEqual([["dc:title", "'Foo bar baz'"]]);
  });

  it("should gracefully handle unbalanced end tags (issue 10410)", function () {
    const data =
      '<?xpacket begin="" id="W5M0MpCehiHzreSzNTczkc9d"?>' +
      '<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">' +
      '<rdf:Description rdf:about="" ' +
      'xmlns:pdf="http://ns.adobe.com/pdf/1.3/">' +
      "<pdf:Producer>Soda PDF 5</pdf:Producer></rdf:Description>" +
      '<rdf:Description rdf:about="" ' +
      'xmlns:xap="http://ns.adobe.com/xap/1.0/">' +
      "<xap:CreateDate>2018-10-02T08:14:49-05:00</xap:CreateDate>" +
      "<xap:CreatorTool>Soda PDF 5</xap:CreatorTool>" +
      "<xap:MetadataDate>2018-10-02T08:14:49-05:00</xap:MetadataDate> " +
      "<xap:ModifyDate>2018-10-02T08:14:49-05:00</xap:ModifyDate>" +
      '</rdf:Description><rdf:Description rdf:about="" ' +
      'xmlns:xmpMM="http://ns.adobe.com/xap/1.0/mm/">' +
      "<xmpMM:DocumentID>uuid:00000000-1c84-3cf9-89ba-bef0e729c831" +
      "</xmpMM:DocumentID></rdf:Description>" +
      '</rdf:RDF></x:xmpmeta><?xpacket end="w"?>';
    const metadata = createMetadata(data);

    expect([...metadata]).toEqual([]);
  });

  it("should not be vulnerable to the billion laughs attack", function () {
    const data =
      '<?xml version="1.0"?>' +
      "<!DOCTYPE lolz [" +
      ' <!ENTITY lol "lol">' +
      ' <!ENTITY lol1 "&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;">' +
      ' <!ENTITY lol2 "&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;">' +
      ' <!ENTITY lol3 "&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;">' +
      ' <!ENTITY lol4 "&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;">' +
      ' <!ENTITY lol5 "&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;">' +
      ' <!ENTITY lol6 "&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;">' +
      ' <!ENTITY lol7 "&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;">' +
      ' <!ENTITY lol8 "&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;">' +
      ' <!ENTITY lol9 "&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;">' +
      "]>" +
      '<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">' +
      ' <rdf:Description xmlns:dc="http://purl.org/dc/elements/1.1/">' +
      " <dc:title>" +
      " <rdf:Alt>" +
      ' <rdf:li xml:lang="x-default">a&lol9;b</rdf:li>' +
      " </rdf:Alt>" +
      " </dc:title>" +
      " </rdf:Description>" +
      "</rdf:RDF>";
    const metadata = createMetadata(data);

    expect(metadata.get("dc:title")).toEqual("a&lol9;b");
    expect(metadata.get("dc:qux")).toEqual(null);

    expect([...metadata]).toEqual([["dc:title", "a&lol9;b"]]);
  });
});
93
test/unit/murmurhash3_spec.js
Normal file
@@ -0,0 +1,93 @@
/* Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { MurmurHash3_64 } from "../../src/shared/murmurhash3.js";

describe("MurmurHash3_64", function () {
  it("instantiates without seed", function () {
    const hash = new MurmurHash3_64();
    expect(hash).toEqual(jasmine.any(MurmurHash3_64));
  });
  it("instantiates with seed", function () {
    const hash = new MurmurHash3_64(1);
    expect(hash).toEqual(jasmine.any(MurmurHash3_64));
  });

  const hexDigestExpected = "f61cfdbfdae0f65e";
  const sourceText = "test";
  const sourceCharCodes = [116, 101, 115, 116]; // 't','e','s','t'
  it("correctly generates a hash from a string", function () {
    const hash = new MurmurHash3_64();
    hash.update(sourceText);
    expect(hash.hexdigest()).toEqual(hexDigestExpected);
  });
  it("correctly generates a hash from a Uint8Array", function () {
    const hash = new MurmurHash3_64();
    hash.update(new Uint8Array(sourceCharCodes));
    expect(hash.hexdigest()).toEqual(hexDigestExpected);
  });
  it("correctly generates a hash from a Uint32Array", function () {
    const hash = new MurmurHash3_64();
    hash.update(new Uint32Array(new Uint8Array(sourceCharCodes).buffer));
    expect(hash.hexdigest()).toEqual(hexDigestExpected);
  });

  it("changes the hash after update without seed", function () {
    const hash = new MurmurHash3_64();
    hash.update(sourceText);
    const hexdigest1 = hash.hexdigest();
    hash.update(sourceText);
    const hexdigest2 = hash.hexdigest();
    expect(hexdigest1).not.toEqual(hexdigest2);
  });
  it("changes the hash after update with seed", function () {
    const hash = new MurmurHash3_64(1);
    hash.update(sourceText);
    const hexdigest1 = hash.hexdigest();
    hash.update(sourceText);
    const hexdigest2 = hash.hexdigest();
    expect(hexdigest1).not.toEqual(hexdigest2);
  });

  it(
    "generates correct hashes for TypedArrays which share the same " +
      "underlying ArrayBuffer (issue 12533)",
    function () {
      // prettier-ignore
      const typedArray = new Uint8Array([
        0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1
      ]);
      const startArray = new Uint8Array(typedArray.buffer, 0, 10);
      const endArray = new Uint8Array(typedArray.buffer, 10, 10);

      expect(startArray).not.toEqual(endArray);

      const startHash = new MurmurHash3_64();
      startHash.update(startArray);
      const startHexdigest = startHash.hexdigest();

      const endHash = new MurmurHash3_64();
      endHash.update(endArray);
      const endHexdigest = endHash.hexdigest();

      // The two hashes *must* be different.
      expect(startHexdigest).not.toEqual(endHexdigest);

      expect(startHexdigest).toEqual("a49de339cc5b0819");
      expect(endHexdigest).toEqual("f81a92d9e214ab35");
    }
  );
});
200
test/unit/network_spec.js
Normal file
@@ -0,0 +1,200 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { AbortException, ResponseException } from "../../src/shared/util.js";
|
||||
import { PDFNetworkStream } from "../../src/display/network.js";
|
||||
import { testCrossOriginRedirects } from "./common_pdfstream_tests.js";
|
||||
import { TestPdfsServer } from "./test_utils.js";
|
||||
|
||||
describe("network", function () {
|
||||
const pdf1 = new URL("../pdfs/tracemonkey.pdf", window.location).href;
|
||||
const pdf1Length = 1016315;
|
||||
|
||||
it("read without stream and range", async function () {
|
||||
const stream = new PDFNetworkStream({
|
||||
url: pdf1,
|
||||
rangeChunkSize: 65536,
|
||||
disableStream: true,
|
||||
disableRange: true,
|
||||
});
|
||||
|
||||
const fullReader = stream.getFullReader();
|
||||
|
||||
let isStreamingSupported, isRangeSupported;
|
||||
await fullReader.headersReady.then(function () {
|
||||
isStreamingSupported = fullReader.isStreamingSupported;
|
||||
isRangeSupported = fullReader.isRangeSupported;
|
||||
});
|
||||
|
||||
let len = 0,
|
||||
count = 0;
|
||||
const read = function () {
|
||||
return fullReader.read().then(function (result) {
|
||||
if (result.done) {
|
||||
return undefined;
|
||||
}
|
||||
count++;
|
||||
len += result.value.byteLength;
|
||||
return read();
|
||||
});
|
||||
};
|
||||
|
||||
await read();
|
||||
|
||||
expect(len).toEqual(pdf1Length);
|
||||
expect(count).toEqual(1);
|
||||
expect(isStreamingSupported).toEqual(false);
|
||||
expect(isRangeSupported).toEqual(false);
|
||||
});
|
||||
|
||||
it("read custom ranges", async function () {
|
||||
// We don't test on browsers that don't support range request, so
|
||||
// requiring this test to pass.
|
||||
const rangeSize = 32768;
|
||||
const stream = new PDFNetworkStream({
|
||||
url: pdf1,
|
||||
length: pdf1Length,
|
||||
rangeChunkSize: rangeSize,
|
||||
disableStream: true,
|
||||
disableRange: false,
|
||||
});
|
||||
|
||||
const fullReader = stream.getFullReader();
|
||||
|
||||
let isStreamingSupported, isRangeSupported, fullReaderCancelled;
|
||||
await fullReader.headersReady.then(function () {
|
||||
isStreamingSupported = fullReader.isStreamingSupported;
|
||||
isRangeSupported = fullReader.isRangeSupported;
|
||||
// we shall be able to close the full reader without issues
|
||||
fullReader.cancel(new AbortException("Don't need fullReader."));
|
||||
fullReaderCancelled = true;
|
||||
});
|
||||
|
||||
// Skipping fullReader results, requesting something from the PDF end.
|
||||
const tailSize = pdf1Length % rangeSize || rangeSize;
|
||||
|
||||
const range1Reader = stream.getRangeReader(
|
||||
pdf1Length - tailSize - rangeSize,
|
||||
pdf1Length - tailSize
|
||||
);
|
||||
const range2Reader = stream.getRangeReader(
|
||||
pdf1Length - tailSize,
|
||||
pdf1Length
|
||||
);
|
||||
|
||||
const result1 = { value: 0 },
|
||||
result2 = { value: 0 };
|
||||
const read = function (reader, lenResult) {
|
||||
return reader.read().then(function (result) {
|
||||
if (result.done) {
|
||||
return undefined;
|
||||
}
|
||||
lenResult.value += result.value.byteLength;
|
||||
return read(reader, lenResult);
|
||||
});
|
||||
};
|
||||
|
||||
await Promise.all([
|
||||
read(range1Reader, result1),
|
||||
read(range2Reader, result2),
|
||||
]);
|
||||
|
||||
expect(result1.value).toEqual(rangeSize);
|
||||
expect(result2.value).toEqual(tailSize);
|
||||
expect(isStreamingSupported).toEqual(false);
|
||||
expect(isRangeSupported).toEqual(true);
|
||||
expect(fullReaderCancelled).toEqual(true);
|
||||
});
|
||||
|
||||
it(`handle reading ranges with missing/invalid "Content-Range" header`, async function () {
|
||||
if (globalThis.chrome) {
|
||||
pending("Fails intermittently in Google Chrome.");
|
||||
}
|
||||
|
||||
async function readRanges(mode) {
|
||||
const rangeSize = 32768;
|
||||
const stream = new PDFNetworkStream({
|
||||
url: `${pdf1}?test-network-break-ranges=${mode}`,
|
||||
length: pdf1Length,
|
||||
rangeChunkSize: rangeSize,
|
||||
disableStream: true,
|
||||
disableRange: false,
|
||||
});
|
||||
|
||||
const fullReader = stream.getFullReader();
|
||||
|
||||
await fullReader.headersReady;
|
||||
// Ensure that range requests are supported.
|
||||
expect(fullReader.isRangeSupported).toEqual(true);
|
||||
// We shall be able to close the full reader without issues.
|
||||
fullReader.cancel(new AbortException("Don't need fullReader."));
|
||||
|
||||
const rangeReader = stream.getRangeReader(
|
||||
pdf1Length - rangeSize,
|
||||
pdf1Length
|
||||
);
|
||||
|
||||
try {
|
||||
await rangeReader.read();
|
||||
|
||||
// Shouldn't get here.
|
||||
expect(false).toEqual(true);
|
||||
} catch (ex) {
|
||||
expect(ex instanceof ResponseException).toEqual(true);
|
||||
expect(ex.status).toEqual(0);
|
||||
expect(ex.missing).toEqual(false);
|
||||
}
|
||||
}
|
||||
|
||||
await Promise.all([readRanges("missing"), readRanges("invalid")]);
|
||||
});
|
||||
|
||||
describe("Redirects", function () {
|
||||
beforeAll(async function () {
|
||||
await TestPdfsServer.ensureStarted();
|
||||
});
|
||||
|
||||
afterAll(async function () {
|
||||
await TestPdfsServer.ensureStopped();
|
||||
});
|
||||
|
||||
it("redirects allowed if all responses are same-origin", async function () {
|
||||
await testCrossOriginRedirects({
|
||||
PDFStreamClass: PDFNetworkStream,
|
||||
redirectIfRange: false,
|
||||
async testRangeReader(rangeReader) {
|
||||
await expectAsync(rangeReader.read()).toBeResolved();
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("redirects blocked if any response is cross-origin", async function () {
|
||||
await testCrossOriginRedirects({
|
||||
PDFStreamClass: PDFNetworkStream,
|
||||
redirectIfRange: true,
|
||||
async testRangeReader(rangeReader) {
|
||||
// When read (sync), error should be reported.
|
||||
await expectAsync(rangeReader.read()).toBeRejectedWithError(
|
||||
/^Expected range response-origin "http:.*" to match "http:.*"\.$/
|
||||
);
|
||||
// When read again (async), error should be consistent.
|
||||
await expectAsync(rangeReader.read()).toBeRejectedWithError(
|
||||
/^Expected range response-origin "http:.*" to match "http:.*"\.$/
|
||||
);
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
408
test/unit/network_utils_spec.js
Normal file
@@ -0,0 +1,408 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
createHeaders,
|
||||
createResponseError,
|
||||
extractFilenameFromHeader,
|
||||
validateRangeRequestCapabilities,
|
||||
validateResponseStatus,
|
||||
} from "../../src/display/network_utils.js";
|
||||
import { ResponseException } from "../../src/shared/util.js";
|
||||
|
||||
describe("network_utils", function () {
|
||||
describe("createHeaders", function () {
|
||||
it("returns empty `Headers` for invalid input", function () {
|
||||
const headersArr = [
|
||||
createHeaders(
|
||||
/* isHttp = */ false,
|
||||
/* httpHeaders = */ { "Content-Length": 100 }
|
||||
),
|
||||
createHeaders(/* isHttp = */ true, /* httpHeaders = */ undefined),
|
||||
createHeaders(/* isHttp = */ true, /* httpHeaders = */ null),
|
||||
createHeaders(/* isHttp = */ true, /* httpHeaders = */ "abc"),
|
||||
createHeaders(/* isHttp = */ true, /* httpHeaders = */ 123),
|
||||
];
|
||||
const emptyObj = Object.create(null);
|
||||
|
||||
for (const headers of headersArr) {
|
||||
expect(Object.fromEntries(headers)).toEqual(emptyObj);
|
||||
}
|
||||
});
|
||||
|
||||
it("returns populated `Headers` for valid input", function () {
|
||||
const headers = createHeaders(
|
||||
/* isHttp = */ true,
|
||||
/* httpHeaders = */ {
|
||||
"Content-Length": 100,
|
||||
"Accept-Ranges": "bytes",
|
||||
"Dummy-null": null,
|
||||
"Dummy-undefined": undefined,
|
||||
}
|
||||
);
|
||||
|
||||
expect(Object.fromEntries(headers)).toEqual({
|
||||
"content-length": "100",
|
||||
"accept-ranges": "bytes",
|
||||
"dummy-null": "null",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("validateRangeRequestCapabilities", function () {
|
||||
it("rejects invalid rangeChunkSize", function () {
|
||||
expect(function () {
|
||||
validateRangeRequestCapabilities({ rangeChunkSize: "abc" });
|
||||
}).toThrow(
|
||||
new Error("rangeChunkSize must be an integer larger than zero.")
|
||||
);
|
||||
|
||||
expect(function () {
|
||||
validateRangeRequestCapabilities({ rangeChunkSize: 0 });
|
||||
}).toThrow(
|
||||
new Error("rangeChunkSize must be an integer larger than zero.")
|
||||
);
|
||||
});
|
||||
|
||||
it("rejects disabled or non-HTTP range requests", function () {
|
||||
expect(
|
||||
validateRangeRequestCapabilities({
|
||||
disableRange: true,
|
||||
isHttp: true,
|
||||
responseHeaders: new Headers({
|
||||
"Content-Length": 8,
|
||||
}),
|
||||
rangeChunkSize: 64,
|
||||
})
|
||||
).toEqual({
|
||||
allowRangeRequests: false,
|
||||
suggestedLength: 8,
|
||||
});
|
||||
|
||||
expect(
|
||||
validateRangeRequestCapabilities({
|
||||
disableRange: false,
|
||||
isHttp: false,
|
||||
responseHeaders: new Headers({
|
||||
"Content-Length": 8,
|
||||
}),
|
||||
rangeChunkSize: 64,
|
||||
})
|
||||
).toEqual({
|
||||
allowRangeRequests: false,
|
||||
suggestedLength: 8,
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects invalid Accept-Ranges header values", function () {
|
||||
expect(
|
||||
validateRangeRequestCapabilities({
|
||||
disableRange: false,
|
||||
isHttp: true,
|
||||
responseHeaders: new Headers({
|
||||
"Accept-Ranges": "none",
|
||||
"Content-Length": 8,
|
||||
}),
|
||||
rangeChunkSize: 64,
|
||||
})
|
||||
).toEqual({
|
||||
allowRangeRequests: false,
|
||||
suggestedLength: 8,
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects invalid Content-Encoding header values", function () {
|
||||
expect(
|
||||
validateRangeRequestCapabilities({
|
||||
disableRange: false,
|
||||
isHttp: true,
|
||||
responseHeaders: new Headers({
|
||||
"Accept-Ranges": "bytes",
|
||||
"Content-Encoding": "gzip",
|
||||
"Content-Length": 8,
|
||||
}),
|
||||
rangeChunkSize: 64,
|
||||
})
|
||||
).toEqual({
|
||||
allowRangeRequests: false,
|
||||
suggestedLength: 8,
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects invalid Content-Length header values", function () {
|
||||
expect(
|
||||
validateRangeRequestCapabilities({
|
||||
disableRange: false,
|
||||
isHttp: true,
|
||||
responseHeaders: new Headers({
|
||||
"Accept-Ranges": "bytes",
|
||||
"Content-Length": "eight",
|
||||
}),
|
||||
rangeChunkSize: 64,
|
||||
})
|
||||
).toEqual({
|
||||
allowRangeRequests: false,
|
||||
suggestedLength: undefined,
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects file sizes that are too small for range requests", function () {
|
||||
expect(
|
||||
validateRangeRequestCapabilities({
|
||||
disableRange: false,
|
||||
isHttp: true,
|
||||
responseHeaders: new Headers({
|
||||
"Accept-Ranges": "bytes",
|
||||
"Content-Length": 8,
|
||||
}),
|
||||
rangeChunkSize: 64,
|
||||
})
|
||||
).toEqual({
|
||||
allowRangeRequests: false,
|
||||
suggestedLength: 8,
|
||||
});
|
||||
});
|
||||
|
||||
it("accepts file sizes large enough for range requests", function () {
|
||||
expect(
|
||||
validateRangeRequestCapabilities({
|
||||
disableRange: false,
|
||||
isHttp: true,
|
||||
responseHeaders: new Headers({
|
||||
"Accept-Ranges": "bytes",
|
||||
"Content-Length": 8192,
|
||||
}),
|
||||
rangeChunkSize: 64,
|
||||
})
|
||||
).toEqual({
|
||||
allowRangeRequests: true,
|
||||
suggestedLength: 8192,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("extractFilenameFromHeader", function () {
|
||||
it("returns null when content disposition header is blank", function () {
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
// Empty headers.
|
||||
})
|
||||
)
|
||||
).toBeNull();
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition": "",
|
||||
})
|
||||
)
|
||||
).toBeNull();
|
||||
});
|
||||
|
||||
it("gets the filename from the response header", function () {
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition": "inline",
|
||||
})
|
||||
)
|
||||
).toBeNull();
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition": "attachment",
|
||||
})
|
||||
)
|
||||
).toBeNull();
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition": 'attachment; filename="filename.pdf"',
|
||||
})
|
||||
)
|
||||
).toEqual("filename.pdf");
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition":
|
||||
'attachment; filename="filename.pdf and spaces.pdf"',
|
||||
})
|
||||
)
|
||||
).toEqual("filename.pdf and spaces.pdf");
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition": 'attachment; filename="tl;dr.pdf"',
|
||||
})
|
||||
)
|
||||
).toEqual("tl;dr.pdf");
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition": "attachment; filename=filename.pdf",
|
||||
})
|
||||
)
|
||||
).toEqual("filename.pdf");
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition":
|
||||
"attachment; filename=filename.pdf someotherparam",
|
||||
})
|
||||
)
|
||||
).toEqual("filename.pdf");
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition":
|
||||
'attachment; filename="%e4%b8%ad%e6%96%87.pdf"',
|
||||
})
|
||||
)
|
||||
).toEqual("中文.pdf");
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition": 'attachment; filename="100%.pdf"',
|
||||
})
|
||||
)
|
||||
).toEqual("100%.pdf");
|
||||
});
|
||||
|
||||
it("gets the filename from the response header (RFC 6266)", function () {
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition": "attachment; filename*=filename.pdf",
|
||||
})
|
||||
)
|
||||
).toEqual("filename.pdf");
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition": "attachment; filename*=''filename.pdf",
|
||||
})
|
||||
)
|
||||
).toEqual("filename.pdf");
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition": "attachment; filename*=utf-8''filename.pdf",
|
||||
})
|
||||
)
|
||||
).toEqual("filename.pdf");
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition":
|
||||
"attachment; filename=no.pdf; filename*=utf-8''filename.pdf",
|
||||
})
|
||||
)
|
||||
).toEqual("filename.pdf");
|
||||
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition":
|
||||
"attachment; filename*=utf-8''filename.pdf; filename=no.pdf",
|
||||
})
|
||||
)
|
||||
).toEqual("filename.pdf");
|
||||
});
|
||||
|
||||
it("gets the filename from the response header (RFC 2231)", function () {
|
||||
// Tests continuations (RFC 2231 section 3, via RFC 5987 section 3.1).
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition":
|
||||
"attachment; filename*0=filename; filename*1=.pdf",
|
||||
})
|
||||
)
|
||||
).toEqual("filename.pdf");
|
||||
});
|
||||
|
||||
it("only extracts filename with pdf extension", function () {
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition": 'attachment; filename="filename.png"',
|
||||
})
|
||||
)
|
||||
).toBeNull();
|
||||
});
|
||||
|
||||
it("extension validation is case insensitive", function () {
|
||||
expect(
|
||||
extractFilenameFromHeader(
|
||||
new Headers({
|
||||
"Content-Disposition":
|
||||
'form-data; name="fieldName"; filename="file.PdF"',
|
||||
})
|
||||
)
|
||||
).toEqual("file.PdF");
|
||||
});
|
||||
});
|
||||
|
||||
describe("createResponseError", function () {
|
||||
function testCreateResponseError(url, status, missing) {
|
||||
const error = createResponseError(status, url);
|
||||
|
||||
expect(error instanceof ResponseException).toEqual(true);
|
||||
expect(error.message).toEqual(
|
||||
`Unexpected server response (${status}) while retrieving PDF "${url}".`
|
||||
);
|
||||
expect(error.status).toEqual(status);
|
||||
expect(error.missing).toEqual(missing);
|
||||
}
|
||||
|
||||
it("handles missing PDF file responses", function () {
|
||||
testCreateResponseError("https://foo.com/bar.pdf", 404, true);
|
||||
|
||||
testCreateResponseError("file://foo.pdf", 0, true);
|
||||
});
|
||||
|
||||
it("handles unexpected responses", function () {
|
||||
testCreateResponseError("https://foo.com/bar.pdf", 302, false);
|
||||
|
||||
testCreateResponseError("https://foo.com/bar.pdf", 0, false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("validateResponseStatus", function () {
|
||||
it("accepts valid response statuses", function () {
|
||||
expect(validateResponseStatus(200)).toEqual(true);
|
||||
expect(validateResponseStatus(206)).toEqual(true);
|
||||
});
|
||||
|
||||
it("rejects invalid response statuses", function () {
|
||||
expect(validateResponseStatus(302)).toEqual(false);
|
||||
expect(validateResponseStatus(404)).toEqual(false);
|
||||
expect(validateResponseStatus(null)).toEqual(false);
|
||||
expect(validateResponseStatus(undefined)).toEqual(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
120
test/unit/node_stream_spec.js
Normal file
@@ -0,0 +1,120 @@
/* Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { AbortException, isNodeJS } from "../../src/shared/util.js";
import { PDFNodeStream } from "../../src/display/node_stream.js";

// Ensure that these tests only run in Node.js environments.
if (!isNodeJS) {
  throw new Error(
    'The "node_stream" unit-tests can only be run in Node.js environments.'
  );
}

describe("node_stream", function () {
  const url = process.getBuiltinModule("url");
  const cwdURL = url.pathToFileURL(process.cwd()) + "/";
  const pdf = new URL("./test/pdfs/tracemonkey.pdf", cwdURL).href;
  const pdfLength = 1016315;

  it("read filesystem pdf files", async function () {
    const stream = new PDFNodeStream({
      url: pdf,
      rangeChunkSize: 65536,
      disableStream: true,
      disableRange: true,
    });

    const fullReader = stream.getFullReader();

    let isStreamingSupported, isRangeSupported;
    const promise = fullReader.headersReady.then(() => {
      isStreamingSupported = fullReader.isStreamingSupported;
      isRangeSupported = fullReader.isRangeSupported;
    });

    let len = 0;
    const read = function () {
      return fullReader.read().then(function (result) {
        if (result.done) {
          return undefined;
        }
        len += result.value.byteLength;
        return read();
      });
    };

    await Promise.all([read(), promise]);

    expect(isStreamingSupported).toEqual(false);
    expect(isRangeSupported).toEqual(false);
    expect(len).toEqual(pdfLength);
  });

  it("read custom ranges for filesystem urls", async function () {
    const rangeSize = 32768;
    const stream = new PDFNodeStream({
      url: pdf,
      length: pdfLength,
      rangeChunkSize: rangeSize,
      disableStream: true,
      disableRange: false,
    });

    const fullReader = stream.getFullReader();

    let isStreamingSupported, isRangeSupported, fullReaderCancelled;
    const promise = fullReader.headersReady.then(function () {
      isStreamingSupported = fullReader.isStreamingSupported;
      isRangeSupported = fullReader.isRangeSupported;
      // we shall be able to close the full reader without issues
      fullReader.cancel(new AbortException("Don't need fullReader."));
      fullReaderCancelled = true;
    });

    // Skipping fullReader results, requesting something from the PDF end.
    const tailSize = pdfLength % rangeSize || rangeSize;

    const range1Reader = stream.getRangeReader(
      pdfLength - tailSize - rangeSize,
      pdfLength - tailSize
    );
    const range2Reader = stream.getRangeReader(pdfLength - tailSize, pdfLength);

    const result1 = { value: 0 },
      result2 = { value: 0 };
    const read = function (reader, lenResult) {
      return reader.read().then(function (result) {
        if (result.done) {
          return undefined;
        }
        lenResult.value += result.value.byteLength;
        return read(reader, lenResult);
      });
    };

    await Promise.all([
      read(range1Reader, result1),
      read(range2Reader, result2),
      promise,
    ]);

    expect(result1.value).toEqual(rangeSize);
    expect(result2.value).toEqual(tailSize);
    expect(isStreamingSupported).toEqual(false);
    expect(isRangeSupported).toEqual(true);
    expect(fullReaderCancelled).toEqual(true);
  });
});
457
test/unit/parser_spec.js
Normal file
@@ -0,0 +1,457 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { Cmd, EOF, Name } from "../../src/core/primitives.js";
|
||||
import { Lexer, Linearization, Parser } from "../../src/core/parser.js";
|
||||
import { FormatError } from "../../src/shared/util.js";
|
||||
import { StringStream } from "../../src/core/stream.js";
|
||||
|
||||
describe("parser", function () {
|
||||
describe("Parser", function () {
|
||||
describe("inlineStreamSkipEI", function () {
|
||||
it("should skip over the EI marker if it is found", function () {
|
||||
const string =
|
||||
"q 1 0 0 1 0 0 cm BI /W 10 /H 10 /BPC 1 " +
|
||||
"/F /A85 ID abc123~> EI Q";
|
||||
const input = new StringStream(string);
|
||||
const parser = new Parser({
|
||||
lexer: new Lexer(input),
|
||||
xref: null,
|
||||
allowStreams: true,
|
||||
});
|
||||
|
||||
parser.inlineStreamSkipEI(input);
|
||||
expect(input.pos).toEqual(string.indexOf("Q"));
|
||||
expect(input.peekByte()).toEqual(0x51); // 'Q'
|
||||
});
|
||||
|
||||
it("should skip to the end of stream if the EI marker is not found", function () {
|
||||
const string =
|
||||
"q 1 0 0 1 0 0 cm BI /W 10 /H 10 /BPC 1 /F /A85 ID abc123~> Q";
|
||||
const input = new StringStream(string);
|
||||
const parser = new Parser({
|
||||
lexer: new Lexer(input),
|
||||
xref: null,
|
||||
allowStreams: true,
|
||||
});
|
||||
|
||||
parser.inlineStreamSkipEI(input);
|
||||
expect(input.pos).toEqual(string.length);
|
||||
expect(input.peekByte()).toEqual(-1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Lexer", function () {
|
||||
describe("nextChar", function () {
|
||||
it("should return and set -1 when the end of the stream is reached", function () {
|
||||
const input = new StringStream("");
|
||||
const lexer = new Lexer(input);
|
||||
expect(lexer.nextChar()).toEqual(-1);
|
||||
expect(lexer.currentChar).toEqual(-1);
|
||||
});
|
||||
|
||||
it("should return and set the character after the current position", function () {
|
||||
const input = new StringStream("123");
|
||||
const lexer = new Lexer(input);
|
||||
expect(lexer.nextChar()).toEqual(0x32); // '2'
|
||||
expect(lexer.currentChar).toEqual(0x32); // '2'
|
||||
});
|
||||
});
|
||||
|
||||
describe("peekChar", function () {
|
||||
it("should only return -1 when the end of the stream is reached", function () {
|
||||
const input = new StringStream("");
|
||||
const lexer = new Lexer(input);
|
||||
expect(lexer.peekChar()).toEqual(-1);
|
||||
expect(lexer.currentChar).toEqual(-1);
|
||||
});
|
||||
|
||||
it("should only return the character after the current position", function () {
|
||||
const input = new StringStream("123");
|
||||
const lexer = new Lexer(input);
|
||||
expect(lexer.peekChar()).toEqual(0x32); // '2'
|
||||
expect(lexer.currentChar).toEqual(0x31); // '1'
|
||||
});
|
||||
});
|
||||
|
||||
describe("getNumber", function () {
|
||||
it("should stop parsing numbers at the end of stream", function () {
|
||||
const input = new StringStream("11.234");
|
||||
const lexer = new Lexer(input);
|
||||
expect(lexer.getNumber()).toEqual(11.234);
|
||||
});
|
||||
|
||||
it("should parse PostScript numbers", function () {
|
||||
const numbers = [
|
||||
"-.002",
|
||||
"34.5",
|
||||
"-3.62",
|
||||
"123.6e10",
|
||||
"1E-5",
|
||||
"-1.",
|
||||
"0.0",
|
||||
"123",
|
||||
"-98",
|
||||
"43445",
|
||||
"0",
|
||||
"+17",
|
||||
];
|
||||
for (const number of numbers) {
|
||||
const input = new StringStream(number);
|
||||
const lexer = new Lexer(input);
|
||||
|
||||
const result = lexer.getNumber(),
|
||||
expected = parseFloat(number);
|
||||
|
||||
if (result !== expected && Math.abs(result - expected) < 1e-15) {
|
||||
console.error(
|
||||
`Fuzzy matching "${result}" with "${expected}" to ` +
|
||||
"work-around rounding bugs in Chromium browsers."
|
||||
);
|
||||
|
||||
expect(true).toEqual(true);
|
||||
continue;
|
||||
}
|
||||
expect(result).toEqual(expected);
|
||||
}
|
||||
});
|
||||
|
||||
it("should ignore double negative before number", function () {
|
||||
const input = new StringStream("--205.88");
|
||||
const lexer = new Lexer(input);
|
||||
expect(lexer.getNumber()).toEqual(-205.88);
|
||||
});
|
||||
|
||||
it("should ignore minus signs in the middle of number", function () {
|
||||
const input = new StringStream("205--.88");
|
||||
const lexer = new Lexer(input);
|
||||
expect(lexer.getNumber()).toEqual(205.88);
|
||||
});
|
||||
|
||||
it("should ignore line-breaks between operator and digit in number", function () {
|
||||
const minusInput = new StringStream("-\r\n205.88");
|
||||
const minusLexer = new Lexer(minusInput);
|
||||
expect(minusLexer.getNumber()).toEqual(-205.88);
|
||||
|
||||
const plusInput = new StringStream("+\r\n205.88");
|
||||
const plusLexer = new Lexer(plusInput);
|
||||
expect(plusLexer.getNumber()).toEqual(205.88);
|
||||
});
|
||||
|
||||
it("should treat a single decimal point, or minus/plus sign, as zero", function () {
|
||||
const validNums = [
|
||||
".",
|
||||
"-",
|
||||
"+",
|
||||
"-.",
|
||||
"+.",
|
||||
"-\r\n.",
|
||||
"+\r\n.",
|
||||
"-(",
|
||||
"-<",
|
||||
];
|
||||
for (const number of validNums) {
|
||||
const validInput = new StringStream(number);
|
||||
const validLexer = new Lexer(validInput);
|
||||
|
||||
expect(validLexer.getNumber()).toEqual(0);
|
||||
}
|
||||
|
||||
const invalidNums = ["..", ".-", ".+"];
|
||||
for (const number of invalidNums) {
|
||||
const invalidInput = new StringStream(number);
|
||||
const invalidLexer = new Lexer(invalidInput);
|
||||
|
||||
expect(function () {
|
||||
return invalidLexer.getNumber();
|
||||
}).toThrowError(FormatError, /^Invalid number:\s/);
|
||||
}
|
||||
});
|
||||
|
||||
it("should handle glued numbers and operators", function () {
|
||||
const input = new StringStream("123ET");
|
||||
const lexer = new Lexer(input);
|
||||
expect(lexer.getNumber()).toEqual(123);
|
||||
// The lexer must not have consumed the 'E'
|
||||
expect(lexer.currentChar).toEqual(0x45); // 'E'
|
||||
});
|
||||
});
|
||||
|
||||
describe("getString", function () {
|
||||
it("should stop parsing strings at the end of stream", function () {
|
||||
const input = new StringStream("(1$4)");
|
||||
input.getByte = function (super_getByte) {
|
||||
// Simulating end of file using null (see issue 2766).
|
||||
const ch = super_getByte.call(input);
|
||||
return ch === 0x24 /* '$' */ ? -1 : ch;
|
||||
}.bind(input, input.getByte);
|
||||
const lexer = new Lexer(input);
|
||||
expect(lexer.getString()).toEqual("1");
|
||||
});
|
||||
|
||||
it("should ignore escaped CR and LF", function () {
|
||||
// '(\101\<CR><LF>\102)' should be parsed as 'AB'.
|
||||
const input = new StringStream("(\\101\\\r\n\\102\\\r\\103\\\n\\104)");
|
||||
const lexer = new Lexer(input);
|
||||
expect(lexer.getString()).toEqual("ABCD");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getHexString", function () {
|
||||
it("should handle an odd number of digits", function () {
|
||||
// '7 0 2 15 5 2 2 2 4 3 2 4' should be parsed as
|
||||
// '70 21 55 22 24 32 40'.
|
||||
const input = new StringStream("<7 0 2 15 5 2 2 2 4 3 2 4>");
|
||||
const lexer = new Lexer(input);
|
||||
expect(lexer.getHexString()).toEqual('p!U"$2@');
|
||||
});
|
||||
});
|
||||
|
||||
describe("getName", function () {
|
||||
it("should handle Names with invalid usage of NUMBER SIGN (#)", function () {
|
||||
const inputNames = ["/# 680 0 R", "/#AQwerty", "/#A<</B"];
|
||||
const expectedNames = ["#", "#AQwerty", "#A"];
|
||||
|
||||
for (let i = 0, ii = inputNames.length; i < ii; i++) {
|
||||
const input = new StringStream(inputNames[i]);
|
||||
const lexer = new Lexer(input);
|
||||
expect(lexer.getName()).toEqual(Name.get(expectedNames[i]));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("getObj", function () {
|
||||
it(
|
||||
"should stop immediately when the start of a command is " +
|
||||
"a non-visible ASCII character (issue 13999)",
|
||||
function () {
|
||||
const input = new StringStream("\x14q\nQ");
|
||||
const lexer = new Lexer(input);
|
||||
|
||||
let obj = lexer.getObj();
|
||||
expect(obj instanceof Cmd).toEqual(true);
|
||||
expect(obj.cmd).toEqual("\x14");
|
||||
|
||||
obj = lexer.getObj();
|
||||
expect(obj instanceof Cmd).toEqual(true);
|
||||
expect(obj.cmd).toEqual("q");
|
||||
|
||||
obj = lexer.getObj();
|
||||
expect(obj instanceof Cmd).toEqual(true);
|
||||
expect(obj.cmd).toEqual("Q");
|
||||
|
||||
obj = lexer.getObj();
|
||||
expect(obj).toEqual(EOF);
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Linearization", function () {
|
||||
it("should not find a linearization dictionary", function () {
|
||||
// Not an actual linearization dictionary.
|
||||
// prettier-ignore
|
||||
const stream1 = new StringStream(
|
||||
"3 0 obj\n" +
|
||||
"<<\n" +
|
||||
"/Length 4622\n" +
|
||||
"/Filter /FlateDecode\n" +
|
||||
">>\n" +
|
||||
"endobj"
|
||||
);
|
||||
expect(Linearization.create(stream1)).toEqual(null);
|
||||
|
||||
// Linearization dictionary with invalid version number.
|
||||
// prettier-ignore
|
||||
const stream2 = new StringStream(
|
||||
"1 0 obj\n" +
|
||||
"<<\n" +
|
||||
"/Linearized 0\n" +
|
||||
">>\n" +
|
||||
"endobj"
|
||||
);
|
||||
expect(Linearization.create(stream2)).toEqual(null);
|
||||
});
|
||||
|
||||
it("should accept a valid linearization dictionary", function () {
|
||||
// prettier-ignore
|
||||
const stream = new StringStream(
|
||||
"131 0 obj\n" +
|
||||
"<<\n" +
|
||||
"/Linearized 1\n" +
|
||||
"/O 133\n" +
|
||||
"/H [ 1388 863 ]\n" +
|
||||
"/L 90\n" +
|
||||
"/E 43573\n" +
|
||||
"/N 18\n" +
|
||||
"/T 193883\n" +
|
||||
">>\n" +
|
||||
"endobj"
|
||||
);
|
||||
const expectedLinearizationDict = {
|
||||
length: 90,
|
||||
hints: [1388, 863],
|
||||
objectNumberFirst: 133,
|
||||
endFirst: 43573,
|
||||
numPages: 18,
|
||||
mainXRefEntriesOffset: 193883,
|
||||
pageFirst: 0,
|
||||
};
|
||||
expect(Linearization.create(stream)).toEqual(expectedLinearizationDict);
|
||||
});
|
||||
|
||||
it(
|
||||
"should reject a linearization dictionary with invalid " +
|
||||
"integer parameters",
|
||||
function () {
|
||||
// The /L parameter should be equal to the stream length.
|
||||
// prettier-ignore
|
||||
const stream1 = new StringStream(
|
||||
"1 0 obj\n" +
|
||||
"<<\n" +
|
||||
"/Linearized 1\n" +
|
||||
"/O 133\n" +
|
||||
"/H [ 1388 863 ]\n" +
|
||||
"/L 196622\n" +
|
||||
"/E 43573\n" +
|
||||
"/N 18\n" +
|
||||
"/T 193883\n" +
|
||||
">>\n" +
|
||||
"endobj"
|
||||
);
|
||||
expect(function () {
|
||||
return Linearization.create(stream1);
|
||||
}).toThrow(
|
||||
new Error(
|
||||
'The "L" parameter in the linearization ' +
|
||||
"dictionary does not equal the stream length."
|
||||
)
|
||||
);
|
||||
|
||||
// The /E parameter should not be zero.
|
||||
// prettier-ignore
|
||||
const stream2 = new StringStream(
|
||||
"1 0 obj\n" +
|
||||
"<<\n" +
|
||||
"/Linearized 1\n" +
|
||||
"/O 133\n" +
|
||||
"/H [ 1388 863 ]\n" +
|
||||
"/L 84\n" +
|
||||
"/E 0\n" +
|
||||
"/N 18\n" +
|
||||
"/T 193883\n" +
|
||||
">>\n" +
|
||||
"endobj"
|
||||
);
|
||||
expect(function () {
|
||||
return Linearization.create(stream2);
|
||||
}).toThrow(
|
||||
new Error(
|
||||
'The "E" parameter in the linearization dictionary is invalid.'
|
||||
)
|
||||
);
|
||||
|
||||
// The /O parameter should be an integer.
|
||||
// prettier-ignore
|
||||
const stream3 = new StringStream(
|
||||
"1 0 obj\n" +
|
||||
"<<\n" +
|
||||
"/Linearized 1\n" +
|
||||
"/O /abc\n" +
|
||||
"/H [ 1388 863 ]\n" +
|
||||
"/L 89\n" +
|
||||
"/E 43573\n" +
|
||||
"/N 18\n" +
|
||||
"/T 193883\n" +
|
||||
">>\n" +
|
||||
"endobj"
|
||||
);
|
||||
expect(function () {
|
||||
return Linearization.create(stream3);
|
||||
}).toThrow(
|
||||
new Error(
|
||||
'The "O" parameter in the linearization dictionary is invalid.'
|
||||
)
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it("should reject a linearization dictionary with invalid hint parameters", function () {
|
||||
// The /H parameter should be an array.
|
||||
// prettier-ignore
|
||||
const stream1 = new StringStream(
|
||||
"1 0 obj\n" +
|
||||
"<<\n" +
|
||||
"/Linearized 1\n" +
|
||||
"/O 133\n" +
|
||||
"/H 1388\n" +
|
||||
"/L 80\n" +
|
||||
"/E 43573\n" +
|
||||
"/N 18\n" +
|
||||
"/T 193883\n" +
|
||||
">>\n" +
|
||||
"endobj"
|
||||
);
|
||||
expect(function () {
|
||||
return Linearization.create(stream1);
|
||||
}).toThrow(
|
||||
new Error("Hint array in the linearization dictionary is invalid.")
|
||||
);
|
||||
|
||||
// The hint array should contain two, or four, elements.
|
||||
// prettier-ignore
|
||||
const stream2 = new StringStream(
|
||||
"1 0 obj\n" +
|
||||
"<<\n" +
|
||||
"/Linearized 1\n" +
|
||||
"/O 133\n" +
|
||||
"/H [ 1388 ]\n" +
|
||||
"/L 84\n" +
|
||||
"/E 43573\n" +
|
||||
"/N 18\n" +
|
||||
"/T 193883\n" +
|
||||
">>\n" +
|
||||
"endobj"
|
||||
);
|
||||
expect(function () {
|
||||
return Linearization.create(stream2);
|
||||
}).toThrow(
|
||||
new Error("Hint array in the linearization dictionary is invalid.")
|
||||
);
|
||||
|
||||
// The hint array should not contain zero.
|
||||
// prettier-ignore
|
||||
const stream3 = new StringStream(
|
||||
"1 0 obj\n" +
|
||||
"<<\n" +
|
||||
"/Linearized 1\n" +
|
||||
"/O 133\n" +
|
||||
"/H [ 1388 863 0 234]\n" +
|
||||
"/L 93\n" +
|
||||
"/E 43573\n" +
|
||||
"/N 18\n" +
|
||||
"/T 193883\n" +
|
||||
">>\n" +
|
||||
"endobj"
|
||||
);
|
||||
expect(function () {
|
||||
return Linearization.create(stream3);
|
||||
}).toThrow(
|
||||
new Error("Hint (2) in the linearization dictionary is invalid.")
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
54
test/unit/pdf.image_decoders_spec.js
Normal file
@@ -0,0 +1,54 @@
/* Copyright 2023 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {
  getVerbosityLevel,
  setVerbosityLevel,
  VerbosityLevel,
} from "../../src/shared/util.js";
import { Jbig2Error, Jbig2Image } from "../../src/core/jbig2.js";
import { JpegError, JpegImage } from "../../src/core/jpg.js";
import { JpxError, JpxImage } from "../../src/core/jpx.js";

const expectedAPI = Object.freeze({
  getVerbosityLevel,
  Jbig2Error,
  Jbig2Image,
  JpegError,
  JpegImage,
  JpxError,
  JpxImage,
  setVerbosityLevel,
  VerbosityLevel,
});

describe("pdfimage_api", function () {
  it("checks that the *official* PDF.js-image decoders API exposes the expected functionality", async function () {
    // eslint-disable-next-line no-unsanitized/method
    const pdfimageAPI = await import(
      typeof PDFJSDev !== "undefined" && PDFJSDev.test("LIB")
        ? "../../pdf.image_decoders.js"
        : "../../src/pdf.image_decoders.js"
    );

    // The imported Object contains an (automatically) inserted Symbol,
    // hence we copy the data to allow using a simple comparison below.
    expect({ ...pdfimageAPI }).toEqual(expectedAPI);

    expect(Object.keys(globalThis.pdfjsImageDecoders).sort()).toEqual(
      Object.keys(expectedAPI).sort()
    );
  });
});
46
test/unit/pdf.worker_spec.js
Normal file
@@ -0,0 +1,46 @@
/* Copyright 2023 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { PDFWorker } from "../../src/display/api.js";
import { WorkerMessageHandler } from "../../src/core/worker.js";

const expectedAPI = Object.freeze({
  WorkerMessageHandler,
});

describe("pdfworker_api", function () {
  afterEach(function () {
    // Avoid interfering with other unit-tests, since `globalThis.pdfjsWorker`
    // being defined will impact loading and usage of the worker.
    PDFWorker._resetGlobalState();
  });

  it("checks that the *official* PDF.js-worker API exposes the expected functionality", async function () {
    // eslint-disable-next-line no-unsanitized/method
    const pdfworkerAPI = await import(
      typeof PDFJSDev !== "undefined" && PDFJSDev.test("LIB")
        ? "../../pdf.worker.js"
        : "../../src/pdf.worker.js"
    );

    // The imported Object contains an (automatically) inserted Symbol,
    // hence we copy the data to allow using a simple comparison below.
    expect({ ...pdfworkerAPI }).toEqual(expectedAPI);

    expect(Object.keys(globalThis.pdfjsWorker).sort()).toEqual(
      Object.keys(expectedAPI).sort()
    );
  });
});
1216
test/unit/pdf_find_controller_spec.js
Normal file
File diff suppressed because it is too large
56
test/unit/pdf_find_utils_spec.js
Normal file
@@ -0,0 +1,56 @@
/* Copyright 2018 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { CharacterType, getCharacterType } from "../../web/pdf_find_utils.js";

describe("pdf_find_utils", function () {
  describe("getCharacterType", function () {
    it("gets expected character types", function () {
      const characters = {
        A: CharacterType.ALPHA_LETTER,
        a: CharacterType.ALPHA_LETTER,
        0: CharacterType.ALPHA_LETTER,
        5: CharacterType.ALPHA_LETTER,
        "\xC4": CharacterType.ALPHA_LETTER, // "Ä"
        "\xE4": CharacterType.ALPHA_LETTER, // "ä"
        _: CharacterType.ALPHA_LETTER,
        " ": CharacterType.SPACE,
        "\t": CharacterType.SPACE,
        "\r": CharacterType.SPACE,
        "\n": CharacterType.SPACE,
        "\xA0": CharacterType.SPACE, // nbsp
        "-": CharacterType.PUNCT,
        ",": CharacterType.PUNCT,
        ".": CharacterType.PUNCT,
        ";": CharacterType.PUNCT,
        ":": CharacterType.PUNCT,
        "\u2122": CharacterType.ALPHA_LETTER, // trademark
        "\u0E25": CharacterType.THAI_LETTER,
        "\u4000": CharacterType.HAN_LETTER,
        "\uF950": CharacterType.HAN_LETTER,
        "\u30C0": CharacterType.KATAKANA_LETTER,
        "\u3050": CharacterType.HIRAGANA_LETTER,
        "\uFF80": CharacterType.HALFWIDTH_KATAKANA_LETTER,
      };

      for (const character in characters) {
        const charCode = character.charCodeAt(0);
        const type = characters[character];

        expect(getCharacterType(charCode)).toEqual(type);
      }
    });
  });
});
79
test/unit/pdf_history_spec.js
Normal file
@@ -0,0 +1,79 @@
/* Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { isDestArraysEqual, isDestHashesEqual } from "../../web/pdf_history.js";

describe("pdf_history", function () {
  describe("isDestHashesEqual", function () {
    it("should reject non-equal destination hashes", function () {
      expect(isDestHashesEqual(null, "page.157")).toEqual(false);
      expect(isDestHashesEqual("title.0", "page.157")).toEqual(false);
      expect(isDestHashesEqual("page=1&zoom=auto", "page.157")).toEqual(false);

      expect(isDestHashesEqual("nameddest-page.157", "page.157")).toEqual(
        false
      );
      expect(isDestHashesEqual("page.157", "nameddest=page.157")).toEqual(
        false
      );

      const destArrayString = JSON.stringify([
        { num: 3757, gen: 0 },
        { name: "XYZ" },
        92.918,
        748.972,
        null,
      ]);
      expect(isDestHashesEqual(destArrayString, "page.157")).toEqual(false);
      expect(isDestHashesEqual("page.157", destArrayString)).toEqual(false);
    });

    it("should accept equal destination hashes", function () {
      expect(isDestHashesEqual("page.157", "page.157")).toEqual(true);
      expect(isDestHashesEqual("nameddest=page.157", "page.157")).toEqual(true);

      expect(
        isDestHashesEqual("nameddest=page.157&zoom=100", "page.157")
      ).toEqual(true);
    });
  });

  describe("isDestArraysEqual", function () {
    const firstDest = [{ num: 1, gen: 0 }, { name: "XYZ" }, 0, 375, null];
    const secondDest = [{ num: 5, gen: 0 }, { name: "XYZ" }, 0, 375, null];
    const thirdDest = [{ num: 1, gen: 0 }, { name: "XYZ" }, 750, 0, null];
    const fourthDest = [{ num: 1, gen: 0 }, { name: "XYZ" }, 0, 375, 1.0];
    const fifthDest = [{ gen: 0, num: 1 }, { name: "XYZ" }, 0, 375, null];

    it("should reject non-equal destination arrays", function () {
      expect(isDestArraysEqual(firstDest, undefined)).toEqual(false);
      expect(isDestArraysEqual(firstDest, [1, 2, 3, 4, 5])).toEqual(false);

      expect(isDestArraysEqual(firstDest, secondDest)).toEqual(false);
      expect(isDestArraysEqual(firstDest, thirdDest)).toEqual(false);
      expect(isDestArraysEqual(firstDest, fourthDest)).toEqual(false);
    });

    it("should accept equal destination arrays", function () {
      expect(isDestArraysEqual(firstDest, firstDest)).toEqual(true);
      expect(isDestArraysEqual(firstDest, fifthDest)).toEqual(true);

      const firstDestCopy = firstDest.slice();
      expect(firstDest).not.toBe(firstDestCopy);

      expect(isDestArraysEqual(firstDest, firstDestCopy)).toEqual(true);
    });
  });
});
178
test/unit/pdf_spec.js
Normal file
@@ -0,0 +1,178 @@
/* Copyright 2023 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {
  AbortException,
  AnnotationEditorParamsType,
  AnnotationEditorType,
  AnnotationMode,
  AnnotationType,
  createValidAbsoluteUrl,
  FeatureTest,
  getUuid,
  ImageKind,
  InvalidPDFException,
  MathClamp,
  normalizeUnicode,
  OPS,
  PasswordResponses,
  PermissionFlag,
  ResponseException,
  shadow,
  updateUrlHash,
  Util,
  VerbosityLevel,
} from "../../src/shared/util.js";
import {
  applyOpacity,
  CSSConstants,
  fetchData,
  findContrastColor,
  getFilenameFromUrl,
  getPdfFilenameFromUrl,
  getRGB,
  getXfaPageViewport,
  isDataScheme,
  isPdfFile,
  noContextMenu,
  OutputScale,
  PDFDateString,
  PixelsPerInch,
  RenderingCancelledException,
  renderRichText,
  setLayerDimensions,
  stopEvent,
  SupportedImageMimeTypes,
} from "../../src/display/display_utils.js";
import {
  build,
  getDocument,
  PDFDataRangeTransport,
  PDFWorker,
  version,
} from "../../src/display/api.js";
import { AnnotationEditorLayer } from "../../src/display/editor/annotation_editor_layer.js";
import { AnnotationEditorUIManager } from "../../src/display/editor/tools.js";
import { AnnotationLayer } from "../../src/display/annotation_layer.js";
import { ColorPicker } from "../../src/display/editor/color_picker.js";
import { DOMSVGFactory } from "../../src/display/svg_factory.js";
import { DrawLayer } from "../../src/display/draw_layer.js";
import { GlobalWorkerOptions } from "../../src/display/worker_options.js";
import { isValidExplicitDest } from "../../src/display/api_utils.js";
import { SignatureExtractor } from "../../src/display/editor/drawers/signaturedraw.js";
import { TextLayer } from "../../src/display/text_layer.js";
import { TouchManager } from "../../src/display/touch_manager.js";
import { XfaLayer } from "../../src/display/xfa_layer.js";

const expectedAPI = Object.freeze({
  AbortException,
  AnnotationEditorLayer,
  AnnotationEditorParamsType,
  AnnotationEditorType,
  AnnotationEditorUIManager,
  AnnotationLayer,
  AnnotationMode,
  AnnotationType,
  applyOpacity,
  build,
  ColorPicker,
  createValidAbsoluteUrl,
  CSSConstants,
  DOMSVGFactory,
  DrawLayer,
  FeatureTest,
  fetchData,
  findContrastColor,
  getDocument,
  getFilenameFromUrl,
  getPdfFilenameFromUrl,
  getRGB,
  getUuid,
  getXfaPageViewport,
  GlobalWorkerOptions,
  ImageKind,
  InvalidPDFException,
  isDataScheme,
  isPdfFile,
  isValidExplicitDest,
  MathClamp,
  noContextMenu,
  normalizeUnicode,
  OPS,
  OutputScale,
  PasswordResponses,
  PDFDataRangeTransport,
  PDFDateString,
  PDFWorker,
  PermissionFlag,
  PixelsPerInch,
  RenderingCancelledException,
  renderRichText,
  ResponseException,
  setLayerDimensions,
  shadow,
  SignatureExtractor,
  stopEvent,
  SupportedImageMimeTypes,
  TextLayer,
  TouchManager,
  updateUrlHash,
  Util,
  VerbosityLevel,
  version,
  XfaLayer,
});

describe("pdfjs_api", function () {
  it("checks that the *official* PDF.js API exposes the expected functionality", async function () {
    // eslint-disable-next-line no-unsanitized/method
    const pdfjsAPI = await import(
      typeof PDFJSDev !== "undefined" && PDFJSDev.test("LIB")
        ? "../../pdf.js"
        : "../../src/pdf.js"
    );

    // The imported Object contains an (automatically) inserted Symbol,
    // hence we copy the data to allow using a simple comparison below.
    expect({ ...pdfjsAPI }).toEqual(expectedAPI);

    expect(Object.keys(globalThis.pdfjsLib).sort()).toEqual(
      Object.keys(expectedAPI).sort()
    );
  });
});

describe("web_pdfjsLib", function () {
  it("checks that the viewer re-exports the expected API functionality", async function () {
    // Load the API globally, as the viewer does.
    // eslint-disable-next-line no-unsanitized/method
    await import(
      typeof PDFJSDev !== "undefined" && PDFJSDev.test("LIB")
        ? "../../../generic-legacy/build/pdf.mjs"
        : "../../build/generic/build/pdf.mjs"
    );

    // eslint-disable-next-line no-unsanitized/method
    const webPdfjsLib = await import(
      typeof PDFJSDev !== "undefined" && PDFJSDev.test("LIB")
        ? "../../../../web/pdfjs.js"
        : "../../web/pdfjs.js"
    );

    expect(Object.keys(webPdfjsLib).sort()).toEqual(
      Object.keys(expectedAPI).sort()
    );
  });
});
79
test/unit/pdf_viewer.component_spec.js
Normal file
@@ -0,0 +1,79 @@
/* Copyright 2023 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { FindState, PDFFindController } from "../../web/pdf_find_controller.js";
import {
  LinkTarget,
  PDFLinkService,
  SimpleLinkService,
} from "../../web/pdf_link_service.js";
import {
  parseQueryString,
  ProgressBar,
  RenderingStates,
  ScrollMode,
  SpreadMode,
} from "../../web/ui_utils.js";
import { AnnotationLayerBuilder } from "../../web/annotation_layer_builder.js";
import { DownloadManager } from "../../web/download_manager.js";
import { EventBus } from "../../web/event_utils.js";
import { GenericL10n } from "../../web/genericl10n.js";
import { PDFHistory } from "../../web/pdf_history.js";
import { PDFPageView } from "../../web/pdf_page_view.js";
import { PDFScriptingManager } from "../../web/pdf_scripting_manager.component.js";
import { PDFSinglePageViewer } from "../../web/pdf_single_page_viewer.js";
import { PDFViewer } from "../../web/pdf_viewer.js";
import { StructTreeLayerBuilder } from "../../web/struct_tree_layer_builder.js";
import { TextLayerBuilder } from "../../web/text_layer_builder.js";
import { XfaLayerBuilder } from "../../web/xfa_layer_builder.js";

const expectedAPI = Object.freeze({
  AnnotationLayerBuilder,
  DownloadManager,
  EventBus,
  FindState,
  GenericL10n,
  LinkTarget,
  parseQueryString,
  PDFFindController,
  PDFHistory,
  PDFLinkService,
  PDFPageView,
  PDFScriptingManager,
  PDFSinglePageViewer,
  PDFViewer,
  ProgressBar,
  RenderingStates,
  ScrollMode,
  SimpleLinkService,
  SpreadMode,
  StructTreeLayerBuilder,
  TextLayerBuilder,
  XfaLayerBuilder,
});

describe("pdfviewer_api", function () {
  it("checks that the *official* PDF.js-viewer API exposes the expected functionality", async function () {
    const pdfviewerAPI = await import("../../web/pdf_viewer.component.js");

    // The imported Object contains an (automatically) inserted Symbol,
    // hence we copy the data to allow using a simple comparison below.
    expect({ ...pdfviewerAPI }).toEqual(expectedAPI);

    expect(Object.keys(globalThis.pdfjsViewer).sort()).toEqual(
      Object.keys(expectedAPI).sort()
    );
  });
});
162
test/unit/pdf_viewer_spec.js
Normal file
@@ -0,0 +1,162 @@
/* Copyright 2021 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { PDFPageViewBuffer } from "../../web/pdf_viewer.js";

describe("PDFViewer", function () {
  describe("PDFPageViewBuffer", function () {
    function createViewsMap(startId, endId) {
      const map = new Map();

      for (let id = startId; id <= endId; id++) {
        map.set(id, {
          id,
          destroy: () => {},
        });
      }
      return map;
    }

    it("handles `push` correctly", function () {
      const buffer = new PDFPageViewBuffer(3);

      const viewsMap = createViewsMap(1, 5),
        iterator = viewsMap.values();

      for (let i = 0; i < 3; i++) {
        const view = iterator.next().value;
        buffer.push(view);
      }
      // Ensure that the correct views are inserted.
      expect([...buffer]).toEqual([
        viewsMap.get(1),
        viewsMap.get(2),
        viewsMap.get(3),
      ]);

      for (let i = 3; i < 5; i++) {
        const view = iterator.next().value;
        buffer.push(view);
      }
      // Ensure that the correct views are evicted.
      expect([...buffer]).toEqual([
        viewsMap.get(3),
        viewsMap.get(4),
        viewsMap.get(5),
      ]);
    });

    it("handles `resize` correctly", function () {
      const buffer = new PDFPageViewBuffer(5);

      const viewsMap = createViewsMap(1, 5),
        iterator = viewsMap.values();

      for (let i = 0; i < 5; i++) {
        const view = iterator.next().value;
        buffer.push(view);
      }
      // Ensure that keeping the size constant won't evict any views.
      buffer.resize(5);

      expect([...buffer]).toEqual([
        viewsMap.get(1),
        viewsMap.get(2),
        viewsMap.get(3),
        viewsMap.get(4),
        viewsMap.get(5),
      ]);

      // Ensure that increasing the size won't evict any views.
      buffer.resize(10);

      expect([...buffer]).toEqual([
        viewsMap.get(1),
        viewsMap.get(2),
        viewsMap.get(3),
        viewsMap.get(4),
        viewsMap.get(5),
      ]);

      // Ensure that decreasing the size will evict the correct views.
      buffer.resize(3);

      expect([...buffer]).toEqual([
        viewsMap.get(3),
        viewsMap.get(4),
        viewsMap.get(5),
      ]);
    });

    it("handles `resize` correctly, with `idsToKeep` provided", function () {
      const buffer = new PDFPageViewBuffer(5);

      const viewsMap = createViewsMap(1, 5),
        iterator = viewsMap.values();

      for (let i = 0; i < 5; i++) {
        const view = iterator.next().value;
        buffer.push(view);
      }
      // Ensure that keeping the size constant won't evict any views,
      // while re-ordering them correctly.
      buffer.resize(5, new Set([1, 2]));

      expect([...buffer]).toEqual([
        viewsMap.get(3),
        viewsMap.get(4),
        viewsMap.get(5),
        viewsMap.get(1),
        viewsMap.get(2),
      ]);

      // Ensure that increasing the size won't evict any views,
      // while re-ordering them correctly.
      buffer.resize(10, new Set([3, 4, 5]));

      expect([...buffer]).toEqual([
        viewsMap.get(1),
        viewsMap.get(2),
        viewsMap.get(3),
        viewsMap.get(4),
        viewsMap.get(5),
      ]);

      // Ensure that decreasing the size will evict the correct views,
      // while re-ordering the remaining ones correctly.
      buffer.resize(3, new Set([1, 2, 5]));

      expect([...buffer]).toEqual([
        viewsMap.get(1),
        viewsMap.get(2),
        viewsMap.get(5),
      ]);
    });

    it("handles `has` correctly", function () {
      const buffer = new PDFPageViewBuffer(3);

      const viewsMap = createViewsMap(1, 2),
        iterator = viewsMap.values();

      for (let i = 0; i < 1; i++) {
        const view = iterator.next().value;
        buffer.push(view);
      }
      expect(buffer.has(viewsMap.get(1))).toEqual(true);
      expect(buffer.has(viewsMap.get(2))).toEqual(false);
    });
  });
});
640
test/unit/primitives_spec.js
Normal file
@@ -0,0 +1,640 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
Cmd,
|
||||
Dict,
|
||||
isCmd,
|
||||
isDict,
|
||||
isName,
|
||||
isRefsEqual,
|
||||
Name,
|
||||
Ref,
|
||||
RefSet,
|
||||
RefSetCache,
|
||||
} from "../../src/core/primitives.js";
|
||||
import { StringStream } from "../../src/core/stream.js";
|
||||
import { XRefMock } from "./test_utils.js";
|
||||
|
||||
describe("primitives", function () {
|
||||
describe("Name", function () {
|
||||
it("should retain the given name", function () {
|
||||
const givenName = "Font";
|
||||
const name = Name.get(givenName);
|
||||
expect(name.name).toEqual(givenName);
|
||||
});
|
||||
|
||||
it("should create only one object for a name and cache it", function () {
|
||||
const firstFont = Name.get("Font");
|
||||
const secondFont = Name.get("Font");
|
||||
const firstSubtype = Name.get("Subtype");
|
||||
const secondSubtype = Name.get("Subtype");
|
||||
|
||||
expect(firstFont).toBe(secondFont);
|
||||
expect(firstSubtype).toBe(secondSubtype);
|
||||
expect(firstFont).not.toBe(firstSubtype);
|
||||
});
|
||||
|
||||
it("should create only one object for *empty* names and cache it", function () {
|
||||
const firstEmpty = Name.get("");
|
||||
const secondEmpty = Name.get("");
|
||||
const normalName = Name.get("string");
|
||||
|
||||
expect(firstEmpty).toBe(secondEmpty);
|
||||
expect(firstEmpty).not.toBe(normalName);
|
||||
});
|
||||
|
||||
it("should not accept to create a non-string name", function () {
|
||||
expect(function () {
|
||||
Name.get(123);
|
||||
}).toThrow(new Error('Name: The "name" must be a string.'));
|
||||
});
|
||||
});
|
||||
|
||||
describe("Cmd", function () {
|
||||
it("should retain the given cmd name", function () {
|
||||
const givenCmd = "BT";
|
||||
const cmd = Cmd.get(givenCmd);
|
||||
expect(cmd.cmd).toEqual(givenCmd);
|
||||
});
|
||||
|
||||
it("should create only one object for a command and cache it", function () {
|
||||
const firstBT = Cmd.get("BT");
|
||||
const secondBT = Cmd.get("BT");
|
||||
const firstET = Cmd.get("ET");
|
||||
const secondET = Cmd.get("ET");
|
||||
|
||||
expect(firstBT).toBe(secondBT);
|
||||
expect(firstET).toBe(secondET);
|
||||
expect(firstBT).not.toBe(firstET);
|
||||
});
|
||||
|
||||
it("should not accept to create a non-string cmd", function () {
|
||||
expect(function () {
|
||||
Cmd.get(123);
|
||||
}).toThrow(new Error('Cmd: The "cmd" must be a string.'));
|
||||
});
|
||||
});
|
||||
|
||||
describe("Dict", function () {
|
||||
const checkInvalidHasValues = function (dict) {
|
||||
expect(dict.has()).toBeFalsy();
|
||||
expect(dict.has("Prev")).toBeFalsy();
|
||||
};
|
||||
|
||||
const checkInvalidKeyValues = function (dict) {
|
||||
expect(dict.get()).toBeUndefined();
|
||||
expect(dict.get("Prev")).toBeUndefined();
|
||||
expect(dict.get("D", "Decode")).toBeUndefined();
|
||||
expect(dict.get("FontFile", "FontFile2", "FontFile3")).toBeUndefined();
|
||||
};
|
||||
|
||||
let emptyDict, dictWithSizeKey, dictWithManyKeys;
|
||||
const storedSize = 42;
|
||||
const testFontFile = "file1";
|
||||
const testFontFile2 = "file2";
|
||||
const testFontFile3 = "file3";
|
||||
|
||||
beforeAll(function () {
|
||||
emptyDict = new Dict();
|
||||
|
||||
dictWithSizeKey = new Dict();
|
||||
dictWithSizeKey.set("Size", storedSize);
|
||||
|
||||
dictWithManyKeys = new Dict();
|
||||
dictWithManyKeys.set("FontFile", testFontFile);
|
||||
dictWithManyKeys.set("FontFile2", testFontFile2);
|
||||
dictWithManyKeys.set("FontFile3", testFontFile3);
|
||||
});
|
||||
|
||||
afterAll(function () {
|
||||
emptyDict = dictWithSizeKey = dictWithManyKeys = null;
|
||||
});
|
||||
|
||||
it("should allow assigning an XRef table after creation", function () {
|
||||
const dict = new Dict(null);
|
||||
expect(dict.xref).toEqual(null);
|
||||
|
||||
const xref = new XRefMock([]);
|
||||
dict.assignXref(xref);
|
||||
expect(dict.xref).toEqual(xref);
|
||||
});
|
||||
|
||||
it("should return correct size", function () {
|
||||
const dict = new Dict(null);
|
||||
expect(dict.size).toEqual(0);
|
||||
|
||||
dict.set("Type", Name.get("Page"));
|
||||
expect(dict.size).toEqual(1);
|
||||
|
||||
dict.set("Contents", Ref.get(10, 0));
|
||||
expect(dict.size).toEqual(2);
|
||||
});
|
||||
|
||||
it("should return invalid values for unknown keys", function () {
|
||||
checkInvalidHasValues(emptyDict);
|
||||
checkInvalidKeyValues(emptyDict);
|
||||
});
|
||||
|
||||
it("should return correct value for stored Size key", function () {
|
||||
expect(dictWithSizeKey.has("Size")).toBeTruthy();
|
||||
|
||||
expect(dictWithSizeKey.get("Size")).toEqual(storedSize);
|
||||
expect(dictWithSizeKey.get("Prev", "Size")).toEqual(storedSize);
|
||||
expect(dictWithSizeKey.get("Prev", "Root", "Size")).toEqual(storedSize);
|
||||
});
|
||||
|
||||
it("should return invalid values for unknown keys when Size key is stored", function () {
|
||||
checkInvalidHasValues(dictWithSizeKey);
|
||||
checkInvalidKeyValues(dictWithSizeKey);
|
||||
});
|
||||
|
||||
it("should not accept to set a non-string key", function () {
|
||||
const dict = new Dict();
|
||||
expect(function () {
|
||||
dict.set(123, "val");
|
||||
}).toThrow(new Error('Dict.set: The "key" must be a string.'));
|
||||
|
||||
expect(dict.has(123)).toBeFalsy();
|
||||
|
||||
checkInvalidKeyValues(dict);
|
||||
});
|
||||
|
||||
it("should not accept to set a key with an undefined value", function () {
|
||||
const dict = new Dict();
|
||||
expect(function () {
|
||||
dict.set("Size");
|
||||
}).toThrow(new Error('Dict.set: The "value" cannot be undefined.'));
|
||||
|
||||
expect(dict.has("Size")).toBeFalsy();
|
||||
|
||||
checkInvalidKeyValues(dict);
|
||||
});
|
||||
|
||||
it("should return correct values for multiple stored keys", function () {
|
||||
expect(dictWithManyKeys.has("FontFile")).toBeTruthy();
|
||||
expect(dictWithManyKeys.has("FontFile2")).toBeTruthy();
|
||||
expect(dictWithManyKeys.has("FontFile3")).toBeTruthy();
|
||||
|
||||
expect(dictWithManyKeys.get("FontFile3")).toEqual(testFontFile3);
|
||||
expect(dictWithManyKeys.get("FontFile2", "FontFile3")).toEqual(
|
||||
testFontFile2
|
||||
);
|
||||
expect(
|
||||
dictWithManyKeys.get("FontFile", "FontFile2", "FontFile3")
|
||||
).toEqual(testFontFile);
|
||||
});
|
||||
|
||||
it("should asynchronously fetch unknown keys", async function () {
|
||||
const keyPromises = [
|
||||
dictWithManyKeys.getAsync("Size"),
|
||||
dictWithSizeKey.getAsync("FontFile", "FontFile2", "FontFile3"),
|
||||
];
|
||||
|
||||
const values = await Promise.all(keyPromises);
|
||||
expect(values[0]).toBeUndefined();
|
||||
expect(values[1]).toBeUndefined();
|
||||
});
|
||||
|
||||
it("should asynchronously fetch correct values for multiple stored keys", async function () {
|
||||
const keyPromises = [
|
||||
dictWithManyKeys.getAsync("FontFile3"),
|
||||
dictWithManyKeys.getAsync("FontFile2", "FontFile3"),
|
||||
dictWithManyKeys.getAsync("FontFile", "FontFile2", "FontFile3"),
|
||||
];
|
||||
|
||||
const values = await Promise.all(keyPromises);
|
||||
expect(values[0]).toEqual(testFontFile3);
|
||||
expect(values[1]).toEqual(testFontFile2);
|
||||
expect(values[2]).toEqual(testFontFile);
|
||||
});
|
||||
|
||||
it("should iterate through each stored key", function () {
|
||||
expect([...dictWithManyKeys]).toEqual([
|
||||
["FontFile", testFontFile],
|
||||
["FontFile2", testFontFile2],
|
||||
["FontFile3", testFontFile3],
|
||||
]);
|
||||
});
|
||||
|
||||
it("should handle keys pointing to indirect objects, both sync and async", async function () {
|
||||
const fontRef = Ref.get(1, 0);
|
||||
const xref = new XRefMock([{ ref: fontRef, data: testFontFile }]);
|
||||
const fontDict = new Dict(xref);
|
||||
fontDict.set("FontFile", fontRef);
|
||||
|
||||
expect(fontDict.getRaw("FontFile")).toEqual(fontRef);
|
||||
expect(fontDict.get("FontFile", "FontFile2", "FontFile3")).toEqual(
|
||||
testFontFile
|
||||
);
|
||||
|
||||
const value = await fontDict.getAsync(
|
||||
"FontFile",
|
||||
"FontFile2",
|
||||
"FontFile3"
|
||||
);
|
||||
expect(value).toEqual(testFontFile);
|
||||
});
|
||||
|
||||
it("should handle arrays containing indirect objects", function () {
|
||||
const minCoordRef = Ref.get(1, 0);
|
||||
const maxCoordRef = Ref.get(2, 0);
|
||||
const minCoord = 0;
|
||||
const maxCoord = 1;
|
||||
const xref = new XRefMock([
|
||||
{ ref: minCoordRef, data: minCoord },
|
||||
{ ref: maxCoordRef, data: maxCoord },
|
||||
]);
|
||||
const xObjectDict = new Dict(xref);
|
||||
xObjectDict.set("BBox", [minCoord, maxCoord, minCoordRef, maxCoordRef]);
|
||||
|
||||
expect(xObjectDict.get("BBox")).toEqual([
|
||||
minCoord,
|
||||
maxCoord,
|
||||
minCoordRef,
|
||||
maxCoordRef,
|
||||
]);
|
||||
expect(xObjectDict.getArray("BBox")).toEqual([
|
||||
minCoord,
|
||||
maxCoord,
|
||||
minCoord,
|
||||
maxCoord,
|
||||
]);
|
||||
});
|
||||
|
||||
it("should get all key names", function () {
|
||||
const expectedKeys = ["FontFile", "FontFile2", "FontFile3"];
|
||||
const keys = dictWithManyKeys.getKeys();
|
||||
|
||||
expect(keys.sort()).toEqual(expectedKeys);
|
||||
});
|
||||
|
||||
it("should get all raw values", function () {
|
||||
// Test direct objects:
|
||||
const expectedRawValues1 = [testFontFile, testFontFile2, testFontFile3];
|
||||
const rawValues1 = dictWithManyKeys.getRawValues();
|
||||
|
||||
expect(rawValues1.sort()).toEqual(expectedRawValues1);
|
||||
|
||||
// Test indirect objects:
|
||||
const typeName = Name.get("Page");
|
||||
const resources = new Dict(null),
|
||||
resourcesRef = Ref.get(5, 0);
|
||||
const contents = new StringStream("data"),
|
||||
contentsRef = Ref.get(10, 0);
|
||||
const xref = new XRefMock([
|
||||
{ ref: resourcesRef, data: resources },
|
||||
{ ref: contentsRef, data: contents },
|
||||
]);
|
||||
|
||||
const dict = new Dict(xref);
|
||||
dict.set("Type", typeName);
|
||||
dict.set("Resources", resourcesRef);
|
||||
dict.set("Contents", contentsRef);
|
||||
|
||||
const expectedRawValues2 = [contentsRef, resourcesRef, typeName];
|
||||
const rawValues2 = dict.getRawValues();
|
||||
|
||||
expect(rawValues2.sort()).toEqual(expectedRawValues2);
|
||||
});
|
||||
|
||||
it("should create only one object for Dict.empty", function () {
|
||||
const firstDictEmpty = Dict.empty;
|
||||
const secondDictEmpty = Dict.empty;
|
||||
|
||||
expect(firstDictEmpty).toBe(secondDictEmpty);
|
||||
expect(firstDictEmpty).not.toBe(emptyDict);
|
||||
});
|
||||
|
||||
it("should correctly merge dictionaries", function () {
|
||||
const expectedKeys = ["FontFile", "FontFile2", "FontFile3", "Size"];
|
||||
|
||||
const fontFileDict = new Dict();
|
||||
fontFileDict.set("FontFile", "Type1 font file");
|
||||
const mergedDict = Dict.merge({
|
||||
xref: null,
|
||||
dictArray: [dictWithManyKeys, dictWithSizeKey, fontFileDict],
|
||||
});
|
||||
const mergedKeys = mergedDict.getKeys();
|
||||
|
||||
expect(mergedKeys.sort()).toEqual(expectedKeys);
|
||||
expect(mergedDict.get("FontFile")).toEqual(testFontFile);
|
||||
});
|
||||
|
||||
it("should correctly merge sub-dictionaries", function () {
|
||||
const localFontDict = new Dict();
|
||||
localFontDict.set("F1", "Local font one");
|
||||
|
||||
const globalFontDict = new Dict();
|
||||
globalFontDict.set("F1", "Global font one");
|
||||
globalFontDict.set("F2", "Global font two");
|
||||
globalFontDict.set("F3", "Global font three");
|
||||
|
||||
const localDict = new Dict();
|
||||
localDict.set("Font", localFontDict);
|
||||
|
||||
const globalDict = new Dict();
|
||||
globalDict.set("Font", globalFontDict);
|
||||
|
||||
const mergedDict = Dict.merge({
|
||||
xref: null,
|
||||
dictArray: [localDict, globalDict],
|
||||
});
|
||||
const mergedSubDict = Dict.merge({
|
||||
xref: null,
|
||||
dictArray: [localDict, globalDict],
|
||||
mergeSubDicts: true,
|
||||
});
|
||||
|
||||
const mergedFontDict = mergedDict.get("Font");
|
||||
const mergedSubFontDict = mergedSubDict.get("Font");
|
||||
|
||||
expect(mergedFontDict instanceof Dict).toEqual(true);
|
||||
expect(mergedSubFontDict instanceof Dict).toEqual(true);
|
||||
|
||||
const mergedFontDictKeys = mergedFontDict.getKeys();
|
||||
const mergedSubFontDictKeys = mergedSubFontDict.getKeys();
|
||||
|
||||
expect(mergedFontDictKeys).toEqual(["F1"]);
|
||||
expect(mergedSubFontDictKeys).toEqual(["F1", "F2", "F3"]);
|
||||
|
||||
const mergedFontDictValues = mergedFontDict.getRawValues();
|
||||
const mergedSubFontDictValues = mergedSubFontDict.getRawValues();
|
||||
|
||||
expect(mergedFontDictValues).toEqual(["Local font one"]);
|
||||
expect(mergedSubFontDictValues).toEqual([
|
||||
"Local font one",
|
||||
"Global font two",
|
||||
"Global font three",
|
||||
]);
|
||||
});
|
||||
|
||||
it("should set the values if they're as expected", function () {
|
||||
const dict = new Dict();
|
||||
dict.set("key", "value");
|
||||
|
||||
dict.setIfNotExists("key", "new value");
|
||||
expect(dict.get("key")).toEqual("value");
|
||||
|
||||
dict.setIfNotExists("key1", "value");
|
||||
expect(dict.get("key1")).toEqual("value");
|
||||
|
||||
dict.setIfNumber("a", 123);
|
||||
expect(dict.get("a")).toEqual(123);
|
||||
|
||||
dict.setIfNumber("b", "not a number");
|
||||
expect(dict.has("b")).toBeFalse();
|
||||
|
||||
dict.setIfArray("c", [1, 2, 3]);
|
||||
expect(dict.get("c")).toEqual([1, 2, 3]);
|
||||
|
||||
dict.setIfArray("d", new Uint8Array([4, 5, 6]));
|
||||
expect(dict.get("d")).toEqual(new Uint8Array([4, 5, 6]));
|
||||
|
||||
dict.setIfArray("e", "not an array");
|
||||
expect(dict.has("e")).toBeFalse();
|
||||
|
||||
dict.setIfDefined("f", "defined");
|
||||
expect(dict.get("f")).toEqual("defined");
|
||||
|
||||
dict.setIfDefined("g", undefined);
|
||||
expect(dict.has("g")).toBeFalse();
|
||||
|
||||
dict.setIfDefined("h", null);
|
||||
expect(dict.has("h")).toBeFalse();
|
||||
|
||||
dict.setIfName("i", Name.get("name"));
|
||||
expect(dict.get("i")).toEqual(Name.get("name"));
|
||||
|
||||
dict.setIfName("j", "name");
|
||||
expect(dict.get("j")).toEqual(Name.get("name"));
|
||||
|
||||
dict.setIfName("k", 1234);
|
||||
expect(dict.has("k")).toBeFalse();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Ref", function () {
|
||||
it("should get a string representation", function () {
|
||||
const nonZeroRef = Ref.get(4, 2);
|
||||
expect(nonZeroRef.toString()).toEqual("4R2");
|
||||
|
||||
// If the generation number is 0, a shorter representation is used.
|
||||
const zeroRef = Ref.get(4, 0);
|
||||
expect(zeroRef.toString()).toEqual("4R");
|
||||
});
|
||||
|
||||
it("should retain the stored values", function () {
|
||||
const storedNum = 4;
|
||||
const storedGen = 2;
|
||||
const ref = Ref.get(storedNum, storedGen);
|
||||
expect(ref.num).toEqual(storedNum);
|
||||
expect(ref.gen).toEqual(storedGen);
|
||||
});
|
||||
|
||||
it("should create only one object for a reference and cache it", function () {
|
||||
const firstRef = Ref.get(4, 2);
|
||||
const secondRef = Ref.get(4, 2);
|
||||
const firstOtherRef = Ref.get(5, 2);
|
||||
const secondOtherRef = Ref.get(5, 2);
|
||||
|
||||
expect(firstRef).toBe(secondRef);
|
||||
expect(firstOtherRef).toBe(secondOtherRef);
|
||||
expect(firstRef).not.toBe(firstOtherRef);
|
||||
});
|
||||
});
|
||||
|
||||
describe("RefSet", function () {
|
||||
const ref1 = Ref.get(4, 2),
|
||||
ref2 = Ref.get(5, 2);
|
||||
let refSet;
|
||||
|
||||
beforeEach(function () {
|
||||
refSet = new RefSet();
|
||||
});
|
||||
|
||||
afterEach(function () {
|
||||
refSet = null;
|
||||
});
|
||||
|
||||
it("should have a stored value", function () {
|
||||
refSet.put(ref1);
|
||||
expect(refSet.has(ref1)).toBeTruthy();
|
||||
});
|
||||
|
||||
it("should not have an unknown value", function () {
|
||||
expect(refSet.has(ref1)).toBeFalsy();
|
||||
refSet.put(ref1);
|
||||
expect(refSet.has(ref2)).toBeFalsy();
|
||||
});
|
||||
|
||||
it("should support iteration", function () {
|
||||
refSet.put(ref1);
|
||||
refSet.put(ref2);
|
||||
expect([...refSet]).toEqual([ref1.toString(), ref2.toString()]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("RefSetCache", function () {
|
||||
const ref1 = Ref.get(4, 2),
|
||||
ref2 = Ref.get(5, 2),
|
||||
obj1 = Name.get("foo"),
|
||||
obj2 = Name.get("bar");
|
||||
let cache;
|
||||
|
||||
beforeEach(function () {
|
||||
cache = new RefSetCache();
|
||||
});
|
||||
|
||||
afterEach(function () {
|
||||
cache = null;
|
||||
});
|
||||
|
||||
it("should put, have and get a value", function () {
|
||||
cache.put(ref1, obj1);
|
||||
expect(cache.has(ref1)).toBeTruthy();
|
||||
expect(cache.has(ref2)).toBeFalsy();
|
||||
expect(cache.get(ref1)).toBe(obj1);
|
||||
});
|
||||
|
||||
it("should put, have and get a value by alias", function () {
|
||||
cache.put(ref1, obj1);
|
||||
cache.putAlias(ref2, ref1);
|
||||
expect(cache.has(ref1)).toBeTruthy();
|
||||
expect(cache.has(ref2)).toBeTruthy();
|
||||
expect(cache.get(ref1)).toBe(obj1);
|
||||
expect(cache.get(ref2)).toBe(obj1);
|
||||
});
|
||||
|
||||
it("should report the size of the cache", function () {
|
||||
cache.put(ref1, obj1);
|
||||
expect(cache.size).toEqual(1);
|
||||
cache.put(ref2, obj2);
|
||||
expect(cache.size).toEqual(2);
|
||||
});
|
||||
|
||||
it("should clear the cache", function () {
|
||||
cache.put(ref1, obj1);
|
||||
expect(cache.size).toEqual(1);
|
||||
cache.clear();
|
||||
expect(cache.size).toEqual(0);
|
||||
});
|
||||
|
||||
it("should support iteration", function () {
|
||||
cache.put(ref1, obj1);
|
||||
cache.put(ref2, obj2);
|
||||
expect([...cache]).toEqual([obj1, obj2]);
|
||||
});
|
||||
|
||||
it("should support iteration over key-value pairs", function () {
|
||||
cache.put(ref1, obj1);
|
||||
cache.put(ref2, obj2);
|
||||
expect([...cache.items()]).toEqual([
|
||||
[ref1, obj1],
|
||||
[ref2, obj2],
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isName", function () {
|
||||
/* eslint-disable no-restricted-syntax */
|
||||
|
||||
it("handles non-names", function () {
|
||||
const nonName = {};
|
||||
expect(isName(nonName)).toEqual(false);
|
||||
});
|
||||
|
||||
it("handles names", function () {
|
||||
const name = Name.get("Font");
|
||||
expect(isName(name)).toEqual(true);
|
||||
});
|
||||
|
||||
it("handles names with name check", function () {
|
||||
const name = Name.get("Font");
|
||||
expect(isName(name, "Font")).toEqual(true);
|
||||
expect(isName(name, "Subtype")).toEqual(false);
|
||||
});
|
||||
|
||||
it("handles *empty* names, with name check", function () {
|
||||
const emptyName = Name.get("");
|
||||
|
||||
expect(isName(emptyName)).toEqual(true);
|
||||
expect(isName(emptyName, "")).toEqual(true);
|
||||
expect(isName(emptyName, "string")).toEqual(false);
|
||||
});
|
||||
|
||||
/* eslint-enable no-restricted-syntax */
|
||||
});
|
||||
|
||||
describe("isCmd", function () {
|
||||
/* eslint-disable no-restricted-syntax */
|
||||
|
||||
it("handles non-commands", function () {
|
||||
const nonCmd = {};
|
||||
expect(isCmd(nonCmd)).toEqual(false);
|
||||
});
|
||||
|
||||
it("handles commands", function () {
|
||||
const cmd = Cmd.get("BT");
|
||||
expect(isCmd(cmd)).toEqual(true);
|
||||
});
|
||||
|
||||
it("handles commands with cmd check", function () {
|
||||
const cmd = Cmd.get("BT");
|
||||
expect(isCmd(cmd, "BT")).toEqual(true);
|
||||
expect(isCmd(cmd, "ET")).toEqual(false);
|
||||
});
|
||||
|
||||
/* eslint-enable no-restricted-syntax */
|
||||
});
|
||||
|
||||
describe("isDict", function () {
|
||||
/* eslint-disable no-restricted-syntax */
|
||||
|
||||
it("handles non-dictionaries", function () {
|
||||
const nonDict = {};
|
||||
expect(isDict(nonDict)).toEqual(false);
|
||||
});
|
||||
|
||||
it("handles empty dictionaries with type check", function () {
|
||||
const dict = Dict.empty;
|
||||
expect(isDict(dict)).toEqual(true);
|
||||
expect(isDict(dict, "Page")).toEqual(false);
|
||||
});
|
||||
|
||||
it("handles dictionaries with type check", function () {
|
||||
const dict = new Dict();
|
||||
dict.set("Type", Name.get("Page"));
|
||||
expect(isDict(dict, "Page")).toEqual(true);
|
||||
expect(isDict(dict, "Contents")).toEqual(false);
|
||||
});
|
||||
|
||||
/* eslint-enable no-restricted-syntax */
|
||||
});
|
||||
|
||||
describe("isRefsEqual", function () {
|
||||
it("should handle Refs pointing to the same object", function () {
|
||||
const ref1 = Ref.get(1, 0);
|
||||
const ref2 = Ref.get(1, 0);
|
||||
expect(isRefsEqual(ref1, ref2)).toEqual(true);
|
||||
});
|
||||
|
||||
it("should handle Refs pointing to different objects", function () {
|
||||
const ref1 = Ref.get(1, 0);
|
||||
const ref2 = Ref.get(2, 0);
|
||||
expect(isRefsEqual(ref1, ref2)).toEqual(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
2025
test/unit/scripting_spec.js
Normal file
File diff suppressed because it is too large
Load Diff
41
test/unit/stream_spec.js
Normal file
@@ -0,0 +1,41 @@
/* Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { Dict } from "../../src/core/primitives.js";
import { PredictorStream } from "../../src/core/predictor_stream.js";
import { Stream } from "../../src/core/stream.js";

describe("stream", function () {
  describe("PredictorStream", function () {
    it("should decode simple predictor data", function () {
      const dict = new Dict();
      dict.set("Predictor", 12);
      dict.set("Colors", 1);
      dict.set("BitsPerComponent", 8);
      dict.set("Columns", 2);

      const input = new Stream(
        new Uint8Array([2, 100, 3, 2, 1, 255, 2, 1, 255]),
        0,
        9,
        dict
      );
      const predictor = new PredictorStream(input, /* length = */ 9, dict);
      const result = predictor.getBytes(6);

      expect(result).toEqual(new Uint8Array([100, 3, 101, 2, 102, 1]));
    });
  });
});
154
test/unit/struct_tree_spec.js
Normal file
@@ -0,0 +1,154 @@
|
||||
/* Copyright 2021 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { buildGetDocumentParams } from "./test_utils.js";
|
||||
import { getDocument } from "../../src/display/api.js";
|
||||
|
||||
function equalTrees(rootA, rootB) {
|
||||
function walk(a, b) {
|
||||
expect(a.role).toEqual(b.role);
|
||||
expect(a.lang).toEqual(b.lang);
|
||||
expect(a.type).toEqual(b.type);
|
||||
expect("children" in a).toEqual("children" in b);
|
||||
if (!a.children) {
|
||||
return;
|
||||
}
|
||||
expect(a.children.length).toEqual(b.children.length);
|
||||
for (let i = 0; i < rootA.children.length; i++) {
|
||||
walk(a.children[i], b.children[i]);
|
||||
}
|
||||
}
|
||||
return walk(rootA, rootB);
|
||||
}
|
||||
|
||||
describe("struct tree", function () {
|
||||
describe("getStructTree", function () {
|
||||
it("parses basic structure", async function () {
|
||||
const filename = "structure_simple.pdf";
|
||||
const params = buildGetDocumentParams(filename);
|
||||
const loadingTask = getDocument(params);
|
||||
const doc = await loadingTask.promise;
|
||||
const page = await doc.getPage(1);
|
||||
const struct = await page.getStructTree();
|
||||
equalTrees(
|
||||
{
|
||||
role: "Root",
|
||||
children: [
|
||||
{
|
||||
role: "Document",
|
||||
lang: "en-US",
|
||||
children: [
|
||||
{
|
||||
role: "H1",
|
||||
children: [
|
||||
{ role: "NonStruct", children: [{ type: "content" }] },
|
||||
],
|
||||
},
|
||||
{
|
||||
role: "P",
|
||||
children: [
|
||||
{ role: "NonStruct", children: [{ type: "content" }] },
|
||||
],
|
||||
},
|
||||
{
|
||||
role: "H2",
|
||||
children: [
|
||||
{ role: "NonStruct", children: [{ type: "content" }] },
|
||||
],
|
||||
},
|
||||
{
|
||||
role: "P",
|
||||
children: [
|
||||
{ role: "NonStruct", children: [{ type: "content" }] },
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
struct
|
||||
);
|
||||
await loadingTask.destroy();
|
||||
});
|
||||
|
||||
it("parses structure with marked content reference", async function () {
|
||||
const filename = "issue6782.pdf";
|
||||
const params = buildGetDocumentParams(filename);
|
||||
const loadingTask = getDocument(params);
|
||||
const doc = await loadingTask.promise;
|
||||
const page = await doc.getPage(1);
|
||||
const struct = await page.getStructTree();
|
||||
equalTrees(
|
||||
{
|
||||
role: "Root",
|
||||
children: [
|
||||
{
|
||||
role: "Part",
|
||||
children: [
|
||||
{ role: "P", children: Array(27).fill({ type: "content" }) },
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
struct
|
||||
);
|
||||
await loadingTask.destroy();
|
||||
});
|
||||
});
|
||||
|
||||
it("parses structure with a figure and its bounding box", async function () {
|
||||
const filename = "bug1708040.pdf";
|
||||
const params = buildGetDocumentParams(filename);
|
||||
const loadingTask = getDocument(params);
|
||||
const doc = await loadingTask.promise;
|
||||
const page = await doc.getPage(1);
|
||||
const struct = await page.getStructTree();
|
||||
equalTrees(
|
||||
{
|
||||
children: [
|
||||
{
|
||||
role: "Document",
|
||||
children: [
|
||||
{
|
||||
role: "Sect",
|
||||
children: [
|
||||
{
|
||||
role: "P",
|
||||
children: [{ type: "content", id: "p21R_mc0" }],
|
||||
lang: "EN-US",
|
||||
},
|
||||
{
|
||||
role: "P",
|
||||
children: [{ type: "content", id: "p21R_mc1" }],
|
||||
lang: "EN-US",
|
||||
},
|
||||
{
|
||||
role: "Figure",
|
||||
children: [{ type: "content", id: "p21R_mc2" }],
|
||||
alt: "A logo of a fox and a globe\u0000",
|
||||
bbox: [72, 287.782, 456, 695.032],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
role: "Root",
|
||||
},
|
||||
struct
|
||||
);
|
||||
await loadingTask.destroy();
|
||||
});
|
||||
});
|
||||
72
test/unit/svg_factory_spec.js
Normal file
@@ -0,0 +1,72 @@
/* Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { DOMSVGFactory } from "../../src/display/svg_factory.js";
import { isNodeJS } from "../../src/shared/util.js";

describe("svg_factory", function () {
  describe("DOMSVGFactory", function () {
    let svgFactory;

    beforeAll(function () {
      svgFactory = new DOMSVGFactory();
    });

    afterAll(function () {
      svgFactory = null;
    });

    it("`create` should throw an error if the dimensions are invalid", function () {
      // Invalid width.
      expect(function () {
        return svgFactory.create(-1, 0);
      }).toThrow(new Error("Invalid SVG dimensions"));

      // Invalid height.
      expect(function () {
        return svgFactory.create(0, -1);
      }).toThrow(new Error("Invalid SVG dimensions"));
    });

    it("`create` should return an SVG element if the dimensions are valid", function () {
      if (isNodeJS) {
        pending("Document is not supported in Node.js.");
      }

      const svg = svgFactory.create(20, 40);
      expect(svg instanceof SVGSVGElement).toBe(true);
      expect(svg.getAttribute("version")).toBe("1.1");
      expect(svg.getAttribute("width")).toBe("20px");
      expect(svg.getAttribute("height")).toBe("40px");
      expect(svg.getAttribute("preserveAspectRatio")).toBe("none");
      expect(svg.getAttribute("viewBox")).toBe("0 0 20 40");
    });

    it("`createElement` should throw an error if the type is not a string", function () {
      expect(function () {
        return svgFactory.createElement(true);
      }).toThrow(new Error("Invalid SVG element type"));
    });

    it("`createElement` should return an SVG element if the type is valid", function () {
      if (isNodeJS) {
        pending("Document is not supported in Node.js.");
      }

      const svg = svgFactory.createElement("svg:rect");
      expect(svg instanceof SVGRectElement).toBe(true);
    });
  });
});
261
test/unit/test_utils.js
Normal file
@@ -0,0 +1,261 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { assert, isNodeJS } from "../../src/shared/util.js";
|
||||
import {
|
||||
fetchData as fetchDataNode,
|
||||
NodeCMapReaderFactory,
|
||||
NodeStandardFontDataFactory,
|
||||
} from "../../src/display/node_utils.js";
|
||||
import { NullStream, StringStream } from "../../src/core/stream.js";
|
||||
import { Page, PDFDocument } from "../../src/core/document.js";
|
||||
import { DOMCMapReaderFactory } from "../../src/display/cmap_reader_factory.js";
|
||||
import { DOMStandardFontDataFactory } from "../../src/display/standard_fontdata_factory.js";
|
||||
import { fetchData as fetchDataDOM } from "../../src/display/display_utils.js";
|
||||
import { Ref } from "../../src/core/primitives.js";
|
||||
|
||||
const TEST_PDFS_PATH = isNodeJS ? "./test/pdfs/" : "../pdfs/";
|
||||
|
||||
const CMAP_URL = isNodeJS ? "./external/bcmaps/" : "../../external/bcmaps/";
|
||||
|
||||
const STANDARD_FONT_DATA_URL = isNodeJS
|
||||
? "./external/standard_fonts/"
|
||||
: "../../external/standard_fonts/";
|
||||
|
||||
const WASM_URL = isNodeJS ? "./external/openjpeg/" : "../../external/openjpeg/";
|
||||
|
||||
class DefaultFileReaderFactory {
|
||||
static async fetch(params) {
|
||||
if (isNodeJS) {
|
||||
return fetchDataNode(params.path);
|
||||
}
|
||||
const data = await fetchDataDOM(params.path, /* type = */ "arraybuffer");
|
||||
return new Uint8Array(data);
|
||||
}
|
||||
}
|
||||
|
||||
const DefaultCMapReaderFactory =
|
||||
typeof PDFJSDev !== "undefined" && PDFJSDev.test("GENERIC") && isNodeJS
|
||||
? NodeCMapReaderFactory
|
||||
: DOMCMapReaderFactory;
|
||||
|
||||
const DefaultStandardFontDataFactory =
|
||||
typeof PDFJSDev !== "undefined" && PDFJSDev.test("GENERIC") && isNodeJS
|
||||
? NodeStandardFontDataFactory
|
||||
: DOMStandardFontDataFactory;
|
||||
|
||||
function buildGetDocumentParams(filename, options) {
|
||||
const params = Object.create(null);
|
||||
params.url = isNodeJS
|
||||
? TEST_PDFS_PATH + filename
|
||||
: new URL(TEST_PDFS_PATH + filename, window.location).href;
|
||||
params.standardFontDataUrl = STANDARD_FONT_DATA_URL;
|
||||
params.wasmUrl = WASM_URL;
|
||||
|
||||
for (const option in options) {
|
||||
params[option] = options[option];
|
||||
}
|
||||
return params;
|
||||
}
|
||||
|
||||
function getCrossOriginHostname(hostname) {
|
||||
if (hostname === "localhost") {
|
||||
// Note: This does not work if localhost is listening on IPv6 only.
|
||||
// As a work-around, visit the IPv6 version at:
|
||||
// http://[::1]:8888/test/unit/unit_test.html?spec=Cross-origin
|
||||
return "127.0.0.1";
|
||||
}
|
||||
|
||||
if (hostname === "127.0.0.1" || hostname === "[::1]") {
|
||||
return "localhost";
|
||||
}
|
||||
|
||||
// FQDN are cross-origin and browsers usually resolve them to the same server.
|
||||
return hostname.endsWith(".") ? hostname.slice(0, -1) : hostname + ".";
|
||||
}
|
||||
|
||||
class XRefMock {
|
||||
constructor(array) {
|
||||
this._map = Object.create(null);
|
||||
this._newTemporaryRefNum = null;
|
||||
this._newPersistentRefNum = null;
|
||||
this.stream = new NullStream();
|
||||
|
||||
for (const key in array) {
|
||||
const obj = array[key];
|
||||
this._map[obj.ref.toString()] = obj.data;
|
||||
}
|
||||
}
|
||||
|
||||
getNewPersistentRef(obj) {
|
||||
if (this._newPersistentRefNum === null) {
|
||||
this._newPersistentRefNum = Object.keys(this._map).length || 1;
|
||||
}
|
||||
const ref = Ref.get(this._newPersistentRefNum++, 0);
|
||||
this._map[ref.toString()] = obj;
|
||||
return ref;
|
||||
}
|
||||
|
||||
getNewTemporaryRef() {
|
||||
if (this._newTemporaryRefNum === null) {
|
||||
this._newTemporaryRefNum = Object.keys(this._map).length || 1;
|
||||
}
|
||||
return Ref.get(this._newTemporaryRefNum++, 0);
|
||||
}
|
||||
|
||||
resetNewTemporaryRef() {
|
||||
this._newTemporaryRefNum = null;
|
||||
}
|
||||
|
||||
fetch(ref) {
|
||||
return this._map[ref.toString()];
|
||||
}
|
||||
|
||||
async fetchAsync(ref) {
|
||||
return this.fetch(ref);
|
||||
}
|
||||
|
||||
fetchIfRef(obj) {
|
||||
if (obj instanceof Ref) {
|
||||
return this.fetch(obj);
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
async fetchIfRefAsync(obj) {
|
||||
return this.fetchIfRef(obj);
|
||||
}
|
||||
}
|
||||
|
||||
function createIdFactory(pageIndex) {
|
||||
const pdfManager = {
|
||||
get docId() {
|
||||
return "d0";
|
||||
},
|
||||
};
|
||||
const stream = new StringStream("Dummy_PDF_data");
|
||||
const pdfDocument = new PDFDocument(pdfManager, stream);
|
||||
|
||||
const page = new Page({
|
||||
pdfManager: pdfDocument.pdfManager,
|
||||
xref: pdfDocument.xref,
|
||||
pageIndex,
|
||||
globalIdFactory: pdfDocument._globalIdFactory,
|
||||
});
|
||||
return page._localIdFactory;
|
||||
}
|
||||
|
||||
// Some tests rely on special behavior from webserver.mjs. When loaded in the
|
||||
// browser, the page is already served from WebServer. When running from
|
||||
// Node.js, that is not the case. This helper starts the WebServer if needed,
|
||||
// and offers a mechanism to resolve the URL in a uniform way.
|
||||
class TestPdfsServer {
|
||||
static #webServer;
|
||||
|
||||
static #startCount = 0;
|
||||
|
||||
static #startPromise;
|
||||
|
||||
static async ensureStarted() {
|
||||
if (this.#startCount++) {
|
||||
// Already started before. E.g. from another beforeAll call.
|
||||
return this.#startPromise;
|
||||
}
|
||||
if (!isNodeJS) {
|
||||
// In web browsers, tests are presumably served by webserver.mjs.
|
||||
return undefined;
|
||||
}
|
||||
|
||||
this.#startPromise = this.#startServer().finally(() => {
|
||||
this.#startPromise = null;
|
||||
});
|
||||
return this.#startPromise;
|
||||
}
|
||||
|
||||
static async #startServer() {
|
||||
// WebServer from webserver.mjs is imported dynamically instead of
|
||||
// statically because we do not need it when running from the browser.
|
||||
let WebServer;
|
||||
if (import.meta.url.endsWith("/lib-legacy/test/unit/test_utils.js")) {
|
||||
// When "gulp unittestcli" is used to run tests, the tests are run from
|
||||
// pdf.js/build/lib-legacy/test/ instead of directly from pdf.js/test/.
|
||||
// eslint-disable-next-line import/no-unresolved
|
||||
({ WebServer } = await import("../../../../test/webserver.mjs"));
|
||||
} else {
|
||||
({ WebServer } = await import("../webserver.mjs"));
|
||||
}
|
||||
this.#webServer = new WebServer({
|
||||
host: "127.0.0.1",
|
||||
root: TEST_PDFS_PATH,
|
||||
});
|
||||
await new Promise(resolve => {
|
||||
this.#webServer.start(resolve);
|
||||
});
|
||||
}
|
||||
|
||||
static async ensureStopped() {
|
||||
assert(this.#startCount > 0, "ensureStarted() should be called first");
|
||||
assert(!this.#startPromise, "ensureStarted() should have resolved");
|
||||
if (--this.#startCount) {
|
||||
// Keep server alive as long as there is an ensureStarted() that was not
|
||||
// followed by an ensureStopped() call.
|
||||
// This could happen if ensureStarted() was called again before
|
||||
// ensureStopped() was called from afterAll().
|
||||
return;
|
||||
}
|
||||
if (!isNodeJS) {
|
||||
// Web browsers cannot stop the server.
|
||||
return;
|
||||
}
|
||||
|
||||
await new Promise(resolve => {
|
||||
this.#webServer.stop(resolve);
|
||||
this.#webServer = null;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} path - path to file within test/unit/pdf/ (TEST_PDFS_PATH).
|
||||
* @returns {URL}
|
||||
*/
|
||||
static resolveURL(path) {
|
||||
assert(this.#startCount > 0, "ensureStarted() should be called first");
|
||||
assert(!this.#startPromise, "ensureStarted() should have resolved");
|
||||
|
||||
if (isNodeJS) {
|
||||
// Note: TestPdfsServer.ensureStarted() should be called first.
|
||||
return new URL(path, `http://127.0.0.1:${this.#webServer.port}/`);
|
||||
}
|
||||
// When "gulp server" is used, our URL looks like
|
||||
// http://localhost:8888/test/unit/unit_test.html
|
||||
// The PDFs are served from:
|
||||
// http://localhost:8888/test/pdfs/
|
||||
return new URL(TEST_PDFS_PATH + path, window.location);
|
||||
}
|
||||
}
|
||||
|
||||
export {
|
||||
buildGetDocumentParams,
|
||||
CMAP_URL,
|
||||
createIdFactory,
|
||||
DefaultCMapReaderFactory,
|
||||
DefaultFileReaderFactory,
|
||||
DefaultStandardFontDataFactory,
|
||||
getCrossOriginHostname,
|
||||
STANDARD_FONT_DATA_URL,
|
||||
TEST_PDFS_PATH,
|
||||
TestPdfsServer,
|
||||
XRefMock,
|
||||
};
|
||||
253
test/unit/text_layer_spec.js
Normal file
@@ -0,0 +1,253 @@
|
||||
/* Copyright 2022 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { buildGetDocumentParams } from "./test_utils.js";
|
||||
import { getDocument } from "../../src/display/api.js";
|
||||
import { isNodeJS } from "../../src/shared/util.js";
|
||||
import { TextLayer } from "../../src/display/text_layer.js";
|
||||
|
||||
describe("textLayer", function () {
|
||||
it("creates textLayer from ReadableStream", async function () {
|
||||
if (isNodeJS) {
|
||||
pending("document.createElement is not supported in Node.js.");
|
||||
}
|
||||
const loadingTask = getDocument(buildGetDocumentParams("basicapi.pdf"));
|
||||
const pdfDocument = await loadingTask.promise;
|
||||
const page = await pdfDocument.getPage(1);
|
||||
|
||||
const textLayer = new TextLayer({
|
||||
textContentSource: page.streamTextContent(),
|
||||
container: document.createElement("div"),
|
||||
viewport: page.getViewport({ scale: 1 }),
|
||||
});
|
||||
await textLayer.render();
|
||||
|
||||
expect(textLayer.textContentItemsStr).toEqual([
|
||||
"Table Of Content",
|
||||
"",
|
||||
"Chapter 1",
|
||||
" ",
|
||||
"..........................................................",
|
||||
" ",
|
||||
"2",
|
||||
"",
|
||||
"Paragraph 1.1",
|
||||
" ",
|
||||
"......................................................",
|
||||
" ",
|
||||
"3",
|
||||
"",
|
||||
"page 1 / 3",
|
||||
]);
|
||||
|
||||
await loadingTask.destroy();
|
||||
});
|
||||
|
||||
it("creates textLayer from TextContent", async function () {
|
||||
if (isNodeJS) {
|
||||
pending("document.createElement is not supported in Node.js.");
|
||||
}
|
||||
const loadingTask = getDocument(buildGetDocumentParams("basicapi.pdf"));
|
||||
const pdfDocument = await loadingTask.promise;
|
||||
const page = await pdfDocument.getPage(1);
|
||||
|
||||
const textLayer = new TextLayer({
|
||||
textContentSource: await page.getTextContent(),
|
||||
container: document.createElement("div"),
|
||||
viewport: page.getViewport({ scale: 1 }),
|
||||
});
|
||||
await textLayer.render();
|
||||
|
||||
expect(textLayer.textContentItemsStr).toEqual([
|
||||
"Table Of Content",
|
||||
"",
|
||||
"Chapter 1",
|
||||
" ",
|
||||
"..........................................................",
|
||||
" ",
|
||||
"2",
|
||||
"",
|
||||
"Paragraph 1.1",
|
||||
" ",
|
||||
"......................................................",
|
||||
" ",
|
||||
"3",
|
||||
"",
|
||||
"page 1 / 3",
|
||||
]);
|
||||
|
||||
await loadingTask.destroy();
|
||||
});
|
||||
|
||||
it("creates textLayers in parallel, from ReadableStream", async function () {
|
||||
if (isNodeJS) {
|
||||
pending("document.createElement is not supported in Node.js.");
|
||||
}
|
||||
if (typeof ReadableStream.from !== "function") {
|
||||
pending("ReadableStream.from is not supported.");
|
||||
}
|
||||
const getTransform = container => {
|
||||
const transform = [];
|
||||
|
||||
for (const span of container.childNodes) {
|
||||
const t = span.style.transform;
|
||||
expect(t).toMatch(/^scaleX\([\d.]+\)$/);
|
||||
|
||||
transform.push(t);
|
||||
}
|
||||
return transform;
|
||||
};
|
||||
|
||||
const loadingTask = getDocument(buildGetDocumentParams("basicapi.pdf"));
|
||||
const pdfDocument = await loadingTask.promise;
|
||||
const [page1, page2] = await Promise.all([
|
||||
pdfDocument.getPage(1),
|
||||
pdfDocument.getPage(2),
|
||||
]);
|
||||
|
||||
// Create text-content streams with dummy content.
|
||||
const items1 = [
|
||||
{
|
||||
str: "Chapter A",
|
||||
dir: "ltr",
|
||||
width: 100,
|
||||
height: 20,
|
||||
transform: [20, 0, 0, 20, 45, 744],
|
||||
fontName: "g_d0_f1",
|
||||
hasEOL: false,
|
||||
},
|
||||
{
|
||||
str: "page 1",
|
||||
dir: "ltr",
|
||||
width: 50,
|
||||
height: 20,
|
||||
transform: [20, 0, 0, 20, 45, 744],
|
||||
fontName: "g_d0_f1",
|
||||
hasEOL: false,
|
||||
},
|
||||
];
|
||||
const items2 = [
|
||||
{
|
||||
str: "Chapter B",
|
||||
dir: "ltr",
|
||||
width: 120,
|
||||
height: 10,
|
||||
transform: [10, 0, 0, 10, 492, 16],
|
||||
fontName: "g_d0_f2",
|
||||
hasEOL: false,
|
||||
},
|
||||
{
|
||||
str: "page 2",
|
||||
dir: "ltr",
|
||||
width: 60,
|
||||
height: 10,
|
||||
transform: [10, 0, 0, 10, 492, 16],
|
||||
fontName: "g_d0_f2",
|
||||
hasEOL: false,
|
||||
},
|
||||
];
|
||||
|
||||
const styles = {
|
||||
g_d0_f1: {
|
||||
ascent: 0.75,
|
||||
descent: -0.25,
|
||||
fontFamily: "serif",
|
||||
vertical: false,
|
||||
},
|
||||
g_d0_f2: {
|
||||
ascent: 0.5,
|
||||
descent: -0.5,
|
||||
fontFamily: "sans-serif",
|
||||
vertical: false,
|
||||
},
|
||||
};
|
||||
const lang = "en";
|
||||
|
||||
// Render the textLayers serially, to have something to compare against.
|
||||
const serialContainer1 = document.createElement("div"),
|
||||
serialContainer2 = document.createElement("div");
|
||||
|
||||
const serialTextLayer1 = new TextLayer({
|
||||
textContentSource: { items: items1, styles, lang },
|
||||
container: serialContainer1,
|
||||
viewport: page1.getViewport({ scale: 1 }),
|
||||
});
|
||||
await serialTextLayer1.render();
|
||||
|
||||
const serialTextLayer2 = new TextLayer({
|
||||
textContentSource: { items: items2, styles, lang },
|
||||
container: serialContainer2,
|
||||
viewport: page2.getViewport({ scale: 1 }),
|
||||
});
|
||||
await serialTextLayer2.render();
|
||||
|
||||
const serialTransform1 = getTransform(serialContainer1),
|
||||
serialTransform2 = getTransform(serialContainer2);
|
||||
|
||||
expect(serialTransform1.length).toEqual(2);
|
||||
expect(serialTransform2.length).toEqual(2);
|
||||
|
||||
// Reset any global textLayer-state before rendering in parallel.
|
||||
TextLayer.cleanup();
|
||||
|
||||
const container1 = document.createElement("div"),
|
||||
container2 = document.createElement("div");
|
||||
const waitCapability1 = Promise.withResolvers();
|
||||
|
||||
const streamGenerator1 = (async function* () {
|
||||
for (const item of items1) {
|
||||
yield { items: [item], styles, lang };
|
||||
await waitCapability1.promise;
|
||||
}
|
||||
})();
|
||||
const streamGenerator2 = (async function* () {
|
||||
for (const item of items2) {
|
||||
yield { items: [item], styles, lang };
|
||||
}
|
||||
})();
|
||||
|
||||
const textLayer1 = new TextLayer({
|
||||
textContentSource: ReadableStream.from(streamGenerator1),
|
||||
container: container1,
|
||||
viewport: page1.getViewport({ scale: 1 }),
|
||||
});
|
||||
const textLayer1Promise = textLayer1.render();
|
||||
|
||||
const textLayer2 = new TextLayer({
|
||||
textContentSource: ReadableStream.from(streamGenerator2),
|
||||
container: container2,
|
||||
viewport: page2.getViewport({ scale: 1 }),
|
||||
});
|
||||
await textLayer2.render();
|
||||
|
||||
// Ensure that the first textLayer has its rendering "paused" while
|
||||
// the second textLayer renders.
|
||||
waitCapability1.resolve();
|
||||
await textLayer1Promise;
|
||||
|
||||
// Sanity check to make sure that all text was parsed.
|
||||
expect(textLayer1.textContentItemsStr).toEqual(["Chapter A", "page 1"]);
|
||||
expect(textLayer2.textContentItemsStr).toEqual(["Chapter B", "page 2"]);
|
||||
|
||||
// Ensure that the transforms are identical when parsing in series/parallel.
|
||||
const transform1 = getTransform(container1),
|
||||
transform2 = getTransform(container2);
|
||||
|
||||
expect(transform1).toEqual(serialTransform1);
|
||||
expect(transform2).toEqual(serialTransform2);
|
||||
|
||||
await loadingTask.destroy();
|
||||
});
|
||||
});
|
||||
122
test/unit/type1_parser_spec.js
Normal file
122
test/unit/type1_parser_spec.js
Normal file
@@ -0,0 +1,122 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { SEAC_ANALYSIS_ENABLED } from "../../src/core/fonts_utils.js";
|
||||
import { StringStream } from "../../src/core/stream.js";
|
||||
import { Type1Parser } from "../../src/core/type1_parser.js";
|
||||
|
||||
describe("Type1Parser", function () {
|
||||
it("splits tokens", function () {
|
||||
const stream = new StringStream("/BlueValues[-17 0]noaccess def");
|
||||
const parser = new Type1Parser(stream, false, SEAC_ANALYSIS_ENABLED);
|
||||
expect(parser.getToken()).toEqual("/");
|
||||
expect(parser.getToken()).toEqual("BlueValues");
|
||||
expect(parser.getToken()).toEqual("[");
|
||||
expect(parser.getToken()).toEqual("-17");
|
||||
expect(parser.getToken()).toEqual("0");
|
||||
expect(parser.getToken()).toEqual("]");
|
||||
expect(parser.getToken()).toEqual("noaccess");
|
||||
expect(parser.getToken()).toEqual("def");
|
||||
expect(parser.getToken()).toEqual(null);
|
||||
});
|
||||
|
||||
it("handles glued tokens", function () {
|
||||
const stream = new StringStream("dup/CharStrings");
|
||||
const parser = new Type1Parser(stream, false, SEAC_ANALYSIS_ENABLED);
|
||||
expect(parser.getToken()).toEqual("dup");
|
||||
expect(parser.getToken()).toEqual("/");
|
||||
expect(parser.getToken()).toEqual("CharStrings");
|
||||
});
|
||||
|
||||
it("ignores whitespace", function () {
|
||||
const stream = new StringStream("\nab c\t");
|
||||
const parser = new Type1Parser(stream, false, SEAC_ANALYSIS_ENABLED);
|
||||
expect(parser.getToken()).toEqual("ab");
|
||||
expect(parser.getToken()).toEqual("c");
|
||||
});
|
||||
|
||||
it("parses numbers", function () {
|
||||
const stream = new StringStream("123");
|
||||
const parser = new Type1Parser(stream, false, SEAC_ANALYSIS_ENABLED);
|
||||
expect(parser.readNumber()).toEqual(123);
|
||||
});
|
||||
|
||||
it("parses booleans", function () {
|
||||
const stream = new StringStream("true false");
|
||||
const parser = new Type1Parser(stream, false, SEAC_ANALYSIS_ENABLED);
|
||||
expect(parser.readBoolean()).toEqual(1);
|
||||
expect(parser.readBoolean()).toEqual(0);
|
||||
});
|
||||
|
||||
it("parses number arrays", function () {
|
||||
let stream = new StringStream("[1 2]");
|
||||
let parser = new Type1Parser(stream, false, SEAC_ANALYSIS_ENABLED);
|
||||
expect(parser.readNumberArray()).toEqual([1, 2]);
|
||||
// Variation on spacing.
|
||||
stream = new StringStream("[ 1 2 ]");
|
||||
parser = new Type1Parser(stream, false, SEAC_ANALYSIS_ENABLED);
|
||||
expect(parser.readNumberArray()).toEqual([1, 2]);
|
||||
});
|
||||
|
||||
it("skips comments", function () {
|
||||
const stream = new StringStream(
|
||||
"%!PS-AdobeFont-1.0: CMSY10 003.002\n" +
|
||||
"%%Title: CMSY10\n" +
|
||||
"%Version: 003.002\n" +
|
||||
"FontDirectory"
|
||||
);
|
||||
const parser = new Type1Parser(stream, false, SEAC_ANALYSIS_ENABLED);
|
||||
expect(parser.getToken()).toEqual("FontDirectory");
|
||||
});
|
||||
|
||||
it("parses font program", function () {
|
||||
const stream = new StringStream(
|
||||
"/ExpansionFactor 99\n" +
|
||||
"/Subrs 1 array\n" +
|
||||
"dup 0 1 RD x noaccess put\n" +
|
||||
"end\n" +
|
||||
"/CharStrings 46 dict dup begin\n" +
|
||||
"/.notdef 1 RD x ND\n" +
|
||||
"end"
|
||||
);
|
||||
const parser = new Type1Parser(stream, false, SEAC_ANALYSIS_ENABLED);
|
||||
const program = parser.extractFontProgram({});
|
||||
expect(program.charstrings.length).toEqual(1);
|
||||
expect(program.properties.privateData.ExpansionFactor).toEqual(99);
|
||||
});
|
||||
|
||||
it("parses font header font matrix", function () {
|
||||
const stream = new StringStream(
|
||||
"/FontMatrix [0.001 0 0 0.001 0 0 ]readonly def\n"
|
||||
);
|
||||
const parser = new Type1Parser(stream, false, SEAC_ANALYSIS_ENABLED);
|
||||
const props = {};
|
||||
parser.extractFontHeader(props);
|
||||
expect(props.fontMatrix).toEqual([0.001, 0, 0, 0.001, 0, 0]);
|
||||
});
|
||||
|
||||
it("parses font header encoding", function () {
|
||||
const stream = new StringStream(
|
||||
"/Encoding 256 array\n" +
|
||||
"0 1 255 {1 index exch /.notdef put} for\n" +
|
||||
"dup 33 /arrowright put\n" +
|
||||
"readonly def\n"
|
||||
);
|
||||
const parser = new Type1Parser(stream, false, SEAC_ANALYSIS_ENABLED);
|
||||
const props = { overridableEncoding: true };
|
||||
parser.extractFontHeader(props);
|
||||
expect(props.builtInEncoding[33]).toEqual("arrowright");
|
||||
});
|
||||
});
|
||||
651
test/unit/ui_utils_spec.js
Normal file
651
test/unit/ui_utils_spec.js
Normal file
@@ -0,0 +1,651 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
backtrackBeforeAllVisibleElements,
|
||||
binarySearchFirstItem,
|
||||
calcRound,
|
||||
getPageSizeInches,
|
||||
getVisibleElements,
|
||||
isPortraitOrientation,
|
||||
isValidRotation,
|
||||
parseQueryString,
|
||||
removeNullCharacters,
|
||||
} from "../../web/ui_utils.js";
|
||||
|
||||
describe("ui_utils", function () {
|
||||
describe("binary search", function () {
|
||||
function isTrue(boolean) {
|
||||
return boolean;
|
||||
}
|
||||
function isGreater3(number) {
|
||||
return number > 3;
|
||||
}
|
||||
|
||||
it("empty array", function () {
|
||||
expect(binarySearchFirstItem([], isTrue)).toEqual(0);
|
||||
});
|
||||
it("single boolean entry", function () {
|
||||
expect(binarySearchFirstItem([false], isTrue)).toEqual(1);
|
||||
expect(binarySearchFirstItem([true], isTrue)).toEqual(0);
|
||||
});
|
||||
it("three boolean entries", function () {
|
||||
expect(binarySearchFirstItem([true, true, true], isTrue)).toEqual(0);
|
||||
expect(binarySearchFirstItem([false, true, true], isTrue)).toEqual(1);
|
||||
expect(binarySearchFirstItem([false, false, true], isTrue)).toEqual(2);
|
||||
expect(binarySearchFirstItem([false, false, false], isTrue)).toEqual(3);
|
||||
});
|
||||
it("three numeric entries", function () {
|
||||
expect(binarySearchFirstItem([0, 1, 2], isGreater3)).toEqual(3);
|
||||
expect(binarySearchFirstItem([2, 3, 4], isGreater3)).toEqual(2);
|
||||
expect(binarySearchFirstItem([4, 5, 6], isGreater3)).toEqual(0);
|
||||
});
|
||||
it("three numeric entries and a start index", function () {
|
||||
expect(binarySearchFirstItem([0, 1, 2, 3, 4], isGreater3, 2)).toEqual(4);
|
||||
expect(binarySearchFirstItem([2, 3, 4], isGreater3, 2)).toEqual(2);
|
||||
expect(binarySearchFirstItem([4, 5, 6], isGreater3, 1)).toEqual(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isValidRotation", function () {
|
||||
it("should reject non-integer angles", function () {
|
||||
expect(isValidRotation()).toEqual(false);
|
||||
expect(isValidRotation(null)).toEqual(false);
|
||||
expect(isValidRotation(NaN)).toEqual(false);
|
||||
expect(isValidRotation([90])).toEqual(false);
|
||||
expect(isValidRotation("90")).toEqual(false);
|
||||
expect(isValidRotation(90.5)).toEqual(false);
|
||||
});
|
||||
|
||||
it("should reject non-multiple of 90 degree angles", function () {
|
||||
expect(isValidRotation(45)).toEqual(false);
|
||||
expect(isValidRotation(-123)).toEqual(false);
|
||||
});
|
||||
|
||||
it("should accept valid angles", function () {
|
||||
expect(isValidRotation(0)).toEqual(true);
|
||||
expect(isValidRotation(90)).toEqual(true);
|
||||
expect(isValidRotation(-270)).toEqual(true);
|
||||
expect(isValidRotation(540)).toEqual(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isPortraitOrientation", function () {
|
||||
it("should be portrait orientation", function () {
|
||||
expect(
|
||||
isPortraitOrientation({
|
||||
width: 200,
|
||||
height: 400,
|
||||
})
|
||||
).toEqual(true);
|
||||
|
||||
expect(
|
||||
isPortraitOrientation({
|
||||
width: 500,
|
||||
height: 500,
|
||||
})
|
||||
).toEqual(true);
|
||||
});
|
||||
|
||||
it("should be landscape orientation", function () {
|
||||
expect(
|
||||
isPortraitOrientation({
|
||||
width: 600,
|
||||
height: 300,
|
||||
})
|
||||
).toEqual(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseQueryString", function () {
|
||||
it("should parse one key/value pair", function () {
|
||||
const parameters = parseQueryString("key1=value1");
|
||||
expect(parameters.size).toEqual(1);
|
||||
expect(parameters.get("key1")).toEqual("value1");
|
||||
});
|
||||
|
||||
it("should parse multiple key/value pairs", function () {
|
||||
const parameters = parseQueryString(
|
||||
"key1=value1&key2=value2&key3=value3"
|
||||
);
|
||||
expect(parameters.size).toEqual(3);
|
||||
expect(parameters.get("key1")).toEqual("value1");
|
||||
expect(parameters.get("key2")).toEqual("value2");
|
||||
expect(parameters.get("key3")).toEqual("value3");
|
||||
});
|
||||
|
||||
it("should parse keys without values", function () {
|
||||
const parameters = parseQueryString("key1");
|
||||
expect(parameters.size).toEqual(1);
|
||||
expect(parameters.get("key1")).toEqual("");
|
||||
});
|
||||
|
||||
it("should decode encoded key/value pairs", function () {
|
||||
const parameters = parseQueryString("k%C3%ABy1=valu%C3%AB1");
|
||||
expect(parameters.size).toEqual(1);
|
||||
expect(parameters.get("këy1")).toEqual("valuë1");
|
||||
});
|
||||
|
||||
it("should convert keys to lowercase", function () {
|
||||
const parameters = parseQueryString("Key1=Value1&KEY2=Value2");
|
||||
expect(parameters.size).toEqual(2);
|
||||
expect(parameters.get("key1")).toEqual("Value1");
|
||||
expect(parameters.get("key2")).toEqual("Value2");
|
||||
});
|
||||
});
|
||||
|
||||
describe("removeNullCharacters", function () {
|
||||
it("should not modify string without null characters", function () {
|
||||
const str = "string without null chars";
|
||||
expect(removeNullCharacters(str)).toEqual("string without null chars");
|
||||
});
|
||||
|
||||
it("should modify string with null characters", function () {
|
||||
const str = "string\x00With\x00Null\x00Chars";
|
||||
expect(removeNullCharacters(str)).toEqual("stringWithNullChars");
|
||||
});
|
||||
|
||||
it("should modify string with non-displayable characters", function () {
|
||||
const str = Array.from(
|
||||
Array(32).keys(),
|
||||
x => String.fromCharCode(x) + "a"
|
||||
).join("");
|
||||
// \x00 is replaced by an empty string.
|
||||
const expected =
|
||||
"a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a";
|
||||
expect(removeNullCharacters(str, /* replaceInvisible */ true)).toEqual(
|
||||
expected
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getPageSizeInches", function () {
|
||||
it("gets page size (in inches)", function () {
|
||||
const page = {
|
||||
view: [0, 0, 595.28, 841.89],
|
||||
userUnit: 1.0,
|
||||
rotate: 0,
|
||||
};
|
||||
const { width, height } = getPageSizeInches(page);
|
||||
|
||||
expect(+width.toPrecision(3)).toEqual(8.27);
|
||||
expect(+height.toPrecision(4)).toEqual(11.69);
|
||||
});
|
||||
|
||||
it("gets page size (in inches), for non-default /Rotate entry", function () {
|
||||
const pdfPage1 = { view: [0, 0, 612, 792], userUnit: 1, rotate: 0 };
|
||||
const { width: width1, height: height1 } = getPageSizeInches(pdfPage1);
|
||||
|
||||
expect(width1).toEqual(8.5);
|
||||
expect(height1).toEqual(11);
|
||||
|
||||
const pdfPage2 = { view: [0, 0, 612, 792], userUnit: 1, rotate: 90 };
|
||||
const { width: width2, height: height2 } = getPageSizeInches(pdfPage2);
|
||||
|
||||
expect(width2).toEqual(11);
|
||||
expect(height2).toEqual(8.5);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getVisibleElements", function () {
|
||||
// These values are based on margin/border values in the CSS, but there
|
||||
// isn't any real need for them to be; they just need to take *some* value.
|
||||
const BORDER_WIDTH = 9;
|
||||
const SPACING = 2 * BORDER_WIDTH - 7;
|
||||
|
||||
// This is a helper function for assembling an array of view stubs from an
|
||||
// array of arrays of [width, height] pairs, which represents wrapped lines
|
||||
// of pages. It uses the above constants to add realistic spacing between
|
||||
// the pages and the lines.
|
||||
//
|
||||
// If you're reading a test that calls makePages, you should think of the
|
||||
// inputs to makePages as boxes with no borders, being laid out in a
|
||||
// container that has no margins, so that the top of the tallest page in
|
||||
// the first row will be at y = 0, and the left of the first page in
|
||||
// the first row will be at x = 0. The spacing between pages in a row, and
|
||||
// the spacing between rows, is SPACING. If you wanted to construct an
|
||||
// actual HTML document with the same layout, you should give each page
|
||||
// element a margin-right and margin-bottom of SPACING, and add no other
|
||||
// margins, borders, or padding.
|
||||
//
|
||||
// If you're reading makePages itself, you'll see a somewhat more
|
||||
// complicated picture because this suite of tests is exercising
|
||||
// getVisibleElements' ability to account for the borders that real page
|
||||
// elements have. makePages tests this by subtracting a BORDER_WIDTH from
|
||||
// offsetLeft/Top and adding it to clientLeft/Top. So the element stubs that
|
||||
// getVisibleElements sees may, for example, actually have an offsetTop of
|
||||
// -9. If everything is working correctly, this detail won't leak out into
|
||||
// the tests themselves, and so the tests shouldn't use the value of
|
||||
// BORDER_WIDTH at all.
|
||||
function makePages(lines) {
|
||||
const result = [];
|
||||
let lineTop = 0,
|
||||
id = 0;
|
||||
for (const line of lines) {
|
||||
const heights = line.map(pair => pair[1]);
|
||||
const lineHeight = Math.max(...heights);
|
||||
let offsetLeft = -BORDER_WIDTH;
|
||||
for (const [clientWidth, clientHeight] of line) {
|
||||
const offsetTop =
|
||||
lineTop + (lineHeight - clientHeight) / 2 - BORDER_WIDTH;
|
||||
const div = {
|
||||
offsetLeft,
|
||||
offsetTop,
|
||||
clientWidth,
|
||||
clientHeight,
|
||||
clientLeft: BORDER_WIDTH,
|
||||
clientTop: BORDER_WIDTH,
|
||||
};
|
||||
result.push({ id, div });
|
||||
++id;
|
||||
offsetLeft += clientWidth + SPACING;
|
||||
}
|
||||
lineTop += lineHeight + SPACING;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// This is a reimplementation of getVisibleElements without the
|
||||
// optimizations.
|
||||
function slowGetVisibleElements(scroll, pages) {
|
||||
const views = [],
|
||||
ids = new Set();
|
||||
const { scrollLeft, scrollTop } = scroll;
|
||||
const scrollRight = scrollLeft + scroll.clientWidth;
|
||||
const scrollBottom = scrollTop + scroll.clientHeight;
|
||||
for (const view of pages) {
|
||||
const { div } = view;
|
||||
const viewLeft = div.offsetLeft + div.clientLeft;
|
||||
const viewRight = viewLeft + div.clientWidth;
|
||||
const viewTop = div.offsetTop + div.clientTop;
|
||||
const viewBottom = viewTop + div.clientHeight;
|
||||
|
||||
if (
|
||||
viewLeft < scrollRight &&
|
||||
viewRight > scrollLeft &&
|
||||
viewTop < scrollBottom &&
|
||||
viewBottom > scrollTop
|
||||
) {
|
||||
const minY = Math.max(0, scrollTop - viewTop);
|
||||
const minX = Math.max(0, scrollLeft - viewLeft);
|
||||
|
||||
const hiddenHeight = minY + Math.max(0, viewBottom - scrollBottom);
|
||||
const hiddenWidth = minX + Math.max(0, viewRight - scrollRight);
|
||||
|
||||
const fractionHeight =
|
||||
(div.clientHeight - hiddenHeight) / div.clientHeight;
|
||||
const fractionWidth =
|
||||
(div.clientWidth - hiddenWidth) / div.clientWidth;
|
||||
const percent = (fractionHeight * fractionWidth * 100) | 0;
|
||||
|
||||
let visibleArea = null;
|
||||
if (percent < 100) {
|
||||
visibleArea = {
|
||||
minX,
|
||||
minY,
|
||||
maxX: Math.min(viewRight, scrollRight) - viewLeft,
|
||||
maxY: Math.min(viewBottom, scrollBottom) - viewTop,
|
||||
};
|
||||
}
|
||||
|
||||
views.push({
|
||||
id: view.id,
|
||||
x: viewLeft,
|
||||
y: viewTop,
|
||||
view,
|
||||
percent,
|
||||
visibleArea,
|
||||
widthPercent: (fractionWidth * 100) | 0,
|
||||
});
|
||||
ids.add(view.id);
|
||||
}
|
||||
}
|
||||
return { first: views[0], last: views.at(-1), views, ids };
|
||||
}
|
||||
|
||||
// This function takes a fixed layout of pages and compares the system under
|
||||
// test to the slower implementation above, for a range of scroll viewport
|
||||
// sizes and positions.
|
||||
function scrollOverDocument(pages, horizontal = false, rtl = false) {
|
||||
const sizes = pages.map(({ div }) =>
|
||||
horizontal
|
||||
? Math.abs(div.offsetLeft + div.clientLeft + div.clientWidth)
|
||||
: div.offsetTop + div.clientTop + div.clientHeight
|
||||
);
|
||||
const size = Math.max(...sizes);
|
||||
// The numbers (7 and 5) are mostly arbitrary, not magic: increase them to
|
||||
// make scrollOverDocument tests faster, decrease them to make the tests
|
||||
// more scrupulous, and keep them coprime to reduce the chance of missing
|
||||
// weird edge case bugs.
|
||||
for (let i = -size; i < size; i += 7) {
|
||||
// The screen height (or width) here (j - i) doubles on each inner loop
|
||||
// iteration; again, this is just to test an interesting range of cases
|
||||
// without slowing the tests down to check every possible case.
|
||||
for (let j = i + 5; j < size; j += j - i) {
|
||||
const scrollEl = horizontal
|
||||
? {
|
||||
scrollTop: 0,
|
||||
scrollLeft: i,
|
||||
clientHeight: 10000,
|
||||
clientWidth: j - i,
|
||||
}
|
||||
: {
|
||||
scrollTop: i,
|
||||
scrollLeft: 0,
|
||||
clientHeight: j - i,
|
||||
clientWidth: 10000,
|
||||
};
|
||||
expect(
|
||||
getVisibleElements({
|
||||
scrollEl,
|
||||
views: pages,
|
||||
sortByVisibility: false,
|
||||
horizontal,
|
||||
rtl,
|
||||
})
|
||||
).toEqual(slowGetVisibleElements(scrollEl, pages));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
it("with pages of varying height", function () {
|
||||
const pages = makePages([
|
||||
[
|
||||
[50, 20],
|
||||
[20, 50],
|
||||
],
|
||||
[
|
||||
[30, 12],
|
||||
[12, 30],
|
||||
],
|
||||
[
|
||||
[20, 50],
|
||||
[50, 20],
|
||||
],
|
||||
[
|
||||
[50, 20],
|
||||
[20, 50],
|
||||
],
|
||||
]);
|
||||
scrollOverDocument(pages);
|
||||
});
|
||||
|
||||
it("widescreen challenge", function () {
|
||||
const pages = makePages([
|
||||
[
|
||||
[10, 50],
|
||||
[10, 60],
|
||||
[10, 70],
|
||||
[10, 80],
|
||||
[10, 90],
|
||||
],
|
||||
[
|
||||
[10, 90],
|
||||
[10, 80],
|
||||
[10, 70],
|
||||
[10, 60],
|
||||
[10, 50],
|
||||
],
|
||||
[
|
||||
[10, 50],
|
||||
[10, 60],
|
||||
[10, 70],
|
||||
[10, 80],
|
||||
[10, 90],
|
||||
],
|
||||
]);
|
||||
scrollOverDocument(pages);
|
||||
});
|
||||
|
||||
it("works with horizontal scrolling", function () {
|
||||
const pages = makePages([
|
||||
[
|
||||
[10, 50],
|
||||
[20, 20],
|
||||
[30, 10],
|
||||
],
|
||||
]);
|
||||
scrollOverDocument(pages, /* horizontal = */ true);
|
||||
});
|
||||
|
||||
it("works with horizontal scrolling with RTL-documents", function () {
|
||||
const pages = makePages([
|
||||
[
|
||||
[-10, 50],
|
||||
[-20, 20],
|
||||
[-30, 10],
|
||||
],
|
||||
]);
|
||||
scrollOverDocument(pages, /* horizontal = */ true, /* rtl = */ true);
|
||||
});
|
||||
|
||||
it("handles `sortByVisibility` correctly", function () {
|
||||
const scrollEl = {
|
||||
scrollTop: 75,
|
||||
scrollLeft: 0,
|
||||
clientHeight: 750,
|
||||
clientWidth: 1500,
|
||||
};
|
||||
const views = makePages([[[100, 150]], [[100, 150]], [[100, 150]]]);
|
||||
|
||||
const visible = getVisibleElements({ scrollEl, views });
|
||||
const visibleSorted = getVisibleElements({
|
||||
scrollEl,
|
||||
views,
|
||||
sortByVisibility: true,
|
||||
});
|
||||
|
||||
const viewsOrder = [],
|
||||
viewsSortedOrder = [];
|
||||
for (const view of visible.views) {
|
||||
viewsOrder.push(view.id);
|
||||
}
|
||||
for (const view of visibleSorted.views) {
|
||||
viewsSortedOrder.push(view.id);
|
||||
}
|
||||
expect(viewsOrder).toEqual([0, 1, 2]);
|
||||
expect(viewsSortedOrder).toEqual([1, 2, 0]);
|
||||
});
|
||||
|
||||
it("handles views being empty", function () {
|
||||
const scrollEl = {
|
||||
scrollTop: 10,
|
||||
scrollLeft: 0,
|
||||
clientHeight: 750,
|
||||
clientWidth: 1500,
|
||||
};
|
||||
const views = [];
|
||||
|
||||
expect(getVisibleElements({ scrollEl, views })).toEqual({
|
||||
first: undefined,
|
||||
last: undefined,
|
||||
views: [],
|
||||
ids: new Set(),
|
||||
});
|
||||
});
|
||||
|
||||
it("handles all views being hidden (without errors)", function () {
|
||||
const scrollEl = {
|
||||
scrollTop: 100000,
|
||||
scrollLeft: 0,
|
||||
clientHeight: 750,
|
||||
clientWidth: 1500,
|
||||
};
|
||||
const views = makePages([[[100, 150]], [[100, 150]], [[100, 150]]]);
|
||||
|
||||
expect(getVisibleElements({ scrollEl, views })).toEqual({
|
||||
first: undefined,
|
||||
last: undefined,
|
||||
views: [],
|
||||
ids: new Set(),
|
||||
});
|
||||
});
|
||||
|
||||
// This sub-suite is for a notionally internal helper function for
|
||||
// getVisibleElements.
|
||||
describe("backtrackBeforeAllVisibleElements", function () {
|
||||
// Layout elements common to all tests
|
||||
const tallPage = [10, 50];
|
||||
const shortPage = [10, 10];
|
||||
|
||||
// A scroll position that ensures that only the tall pages in the second
|
||||
// row are visible
|
||||
const top1 =
|
||||
20 +
|
||||
SPACING + // height of the first row
|
||||
40; // a value between 30 (so the short pages on the second row are
|
||||
// hidden) and 50 (so the tall pages are visible)
|
||||
|
||||
// A scroll position that ensures that all of the pages in the second row
|
||||
// are visible, but the tall ones are a tiny bit cut off
|
||||
const top2 =
|
||||
20 +
|
||||
SPACING + // height of the first row
|
||||
10; // a value greater than 0 but less than 30
|
||||
|
||||
// These tests refer to cases enumerated in the comments of
|
||||
// backtrackBeforeAllVisibleElements.
|
||||
it("handles case 1", function () {
|
||||
const pages = makePages([
|
||||
[
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
],
|
||||
[tallPage, shortPage, tallPage, shortPage],
|
||||
[
|
||||
[10, 50],
|
||||
[10, 50],
|
||||
[10, 50],
|
||||
[10, 50],
|
||||
],
|
||||
[
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
],
|
||||
[[10, 20]],
|
||||
]);
|
||||
// binary search would land on the second row, first page
|
||||
const bsResult = 4;
|
||||
expect(
|
||||
backtrackBeforeAllVisibleElements(bsResult, pages, top1)
|
||||
).toEqual(4);
|
||||
});
|
||||
|
||||
it("handles case 2", function () {
|
||||
const pages = makePages([
|
||||
[
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
],
|
||||
[tallPage, shortPage, tallPage, tallPage],
|
||||
[
|
||||
[10, 50],
|
||||
[10, 50],
|
||||
[10, 50],
|
||||
[10, 50],
|
||||
],
|
||||
[
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
],
|
||||
]);
|
||||
// binary search would land on the second row, third page
|
||||
const bsResult = 6;
|
||||
expect(
|
||||
backtrackBeforeAllVisibleElements(bsResult, pages, top1)
|
||||
).toEqual(4);
|
||||
});
|
||||
|
||||
it("handles case 3", function () {
|
||||
const pages = makePages([
|
||||
[
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
],
|
||||
[tallPage, shortPage, tallPage, shortPage],
|
||||
[
|
||||
[10, 50],
|
||||
[10, 50],
|
||||
[10, 50],
|
||||
[10, 50],
|
||||
],
|
||||
[
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
],
|
||||
]);
|
||||
// binary search would land on the third row, first page
|
||||
const bsResult = 8;
|
||||
expect(
|
||||
backtrackBeforeAllVisibleElements(bsResult, pages, top1)
|
||||
).toEqual(4);
|
||||
});
|
||||
|
||||
it("handles case 4", function () {
|
||||
const pages = makePages([
|
||||
[
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
],
|
||||
[tallPage, shortPage, tallPage, shortPage],
|
||||
[
|
||||
[10, 50],
|
||||
[10, 50],
|
||||
[10, 50],
|
||||
[10, 50],
|
||||
],
|
||||
[
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
[10, 20],
|
||||
],
|
||||
]);
|
||||
// binary search would land on the second row, first page
|
||||
const bsResult = 4;
|
||||
expect(
|
||||
backtrackBeforeAllVisibleElements(bsResult, pages, top2)
|
||||
).toEqual(4);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("calcRound", function () {
|
||||
it("should handle different browsers/environments correctly", function () {
|
||||
if (
|
||||
typeof window !== "undefined" &&
|
||||
window.navigator?.userAgent?.includes("Firefox")
|
||||
) {
|
||||
expect(calcRound(1.6)).not.toEqual(1.6);
|
||||
} else {
|
||||
expect(calcRound(1.6)).toEqual(1.6);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
161
test/unit/unicode_spec.js
Normal file
161
test/unit/unicode_spec.js
Normal file
@@ -0,0 +1,161 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
getCharUnicodeCategory,
|
||||
getUnicodeForGlyph,
|
||||
getUnicodeRangeFor,
|
||||
mapSpecialUnicodeValues,
|
||||
} from "../../src/core/unicode.js";
|
||||
import {
|
||||
getDingbatsGlyphsUnicode,
|
||||
getGlyphsUnicode,
|
||||
} from "../../src/core/glyphlist.js";
|
||||
|
||||
describe("unicode", function () {
|
||||
describe("mapSpecialUnicodeValues", function () {
|
||||
it("should not re-map normal Unicode values", function () {
|
||||
// A
|
||||
expect(mapSpecialUnicodeValues(0x0041)).toEqual(0x0041);
|
||||
// fi
|
||||
expect(mapSpecialUnicodeValues(0xfb01)).toEqual(0xfb01);
|
||||
});
|
||||
|
||||
it("should re-map special Unicode values", function () {
|
||||
// copyrightsans => copyright
|
||||
expect(mapSpecialUnicodeValues(0xf8e9)).toEqual(0x00a9);
|
||||
// Private Use Area characters
|
||||
expect(mapSpecialUnicodeValues(0xffff)).toEqual(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getCharUnicodeCategory", function () {
|
||||
it("should correctly determine the character category", function () {
|
||||
const tests = {
|
||||
// Whitespace
|
||||
" ": {
|
||||
isZeroWidthDiacritic: false,
|
||||
isInvisibleFormatMark: false,
|
||||
isWhitespace: true,
|
||||
},
|
||||
"\t": {
|
||||
isZeroWidthDiacritic: false,
|
||||
isInvisibleFormatMark: false,
|
||||
isWhitespace: true,
|
||||
},
|
||||
"\u2001": {
|
||||
isZeroWidthDiacritic: false,
|
||||
isInvisibleFormatMark: false,
|
||||
isWhitespace: true,
|
||||
},
|
||||
"\uFEFF": {
|
||||
isZeroWidthDiacritic: false,
|
||||
isInvisibleFormatMark: false,
|
||||
isWhitespace: true,
|
||||
},
|
||||
|
||||
// Diacritic
|
||||
"\u0302": {
|
||||
isZeroWidthDiacritic: true,
|
||||
isInvisibleFormatMark: false,
|
||||
isWhitespace: false,
|
||||
},
|
||||
"\u0344": {
|
||||
isZeroWidthDiacritic: true,
|
||||
isInvisibleFormatMark: false,
|
||||
isWhitespace: false,
|
||||
},
|
||||
"\u0361": {
|
||||
isZeroWidthDiacritic: true,
|
||||
isInvisibleFormatMark: false,
|
||||
isWhitespace: false,
|
||||
},
|
||||
|
||||
// Invisible format mark
|
||||
"\u200B": {
|
||||
isZeroWidthDiacritic: false,
|
||||
isInvisibleFormatMark: true,
|
||||
isWhitespace: false,
|
||||
},
|
||||
"\u200D": {
|
||||
isZeroWidthDiacritic: false,
|
||||
isInvisibleFormatMark: true,
|
||||
isWhitespace: false,
|
||||
},
|
||||
|
||||
// No whitespace or diacritic or invisible format mark
|
||||
a: {
|
||||
isZeroWidthDiacritic: false,
|
||||
isInvisibleFormatMark: false,
|
||||
isWhitespace: false,
|
||||
},
|
||||
1: {
|
||||
isZeroWidthDiacritic: false,
|
||||
isInvisibleFormatMark: false,
|
||||
isWhitespace: false,
|
||||
},
|
||||
};
|
||||
for (const [character, expectation] of Object.entries(tests)) {
|
||||
expect(getCharUnicodeCategory(character)).toEqual(expectation);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("getUnicodeForGlyph", function () {
|
||||
let standardMap, dingbatsMap;
|
||||
|
||||
beforeAll(function () {
|
||||
standardMap = getGlyphsUnicode();
|
||||
dingbatsMap = getDingbatsGlyphsUnicode();
|
||||
});
|
||||
|
||||
afterAll(function () {
|
||||
standardMap = dingbatsMap = null;
|
||||
});
|
||||
|
||||
it("should get Unicode values for valid glyph names", function () {
|
||||
expect(getUnicodeForGlyph("A", standardMap)).toEqual(0x0041);
|
||||
expect(getUnicodeForGlyph("a1", dingbatsMap)).toEqual(0x2701);
|
||||
});
|
||||
|
||||
it("should recover Unicode values from uniXXXX/uXXXX{XX} glyph names", function () {
|
||||
expect(getUnicodeForGlyph("uni0041", standardMap)).toEqual(0x0041);
|
||||
expect(getUnicodeForGlyph("u0041", standardMap)).toEqual(0x0041);
|
||||
|
||||
expect(getUnicodeForGlyph("uni2701", dingbatsMap)).toEqual(0x2701);
|
||||
expect(getUnicodeForGlyph("u2701", dingbatsMap)).toEqual(0x2701);
|
||||
});
|
||||
|
||||
it("should not get Unicode values for invalid glyph names", function () {
|
||||
expect(getUnicodeForGlyph("Qwerty", standardMap)).toEqual(-1);
|
||||
expect(getUnicodeForGlyph("Qwerty", dingbatsMap)).toEqual(-1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getUnicodeRangeFor", function () {
|
||||
it("should get correct Unicode range", function () {
|
||||
// A (Basic Latin)
|
||||
expect(getUnicodeRangeFor(0x0041)).toEqual(0);
|
||||
// fi (Alphabetic Presentation Forms)
|
||||
expect(getUnicodeRangeFor(0xfb01)).toEqual(62);
|
||||
// Combining diacritic (Cyrillic Extended-A)
|
||||
expect(getUnicodeRangeFor(0x2dff)).toEqual(9);
|
||||
});
|
||||
|
||||
it("should not get a Unicode range", function () {
|
||||
expect(getUnicodeRangeFor(0xaa60)).toEqual(-1);
|
||||
});
|
||||
});
|
||||
});
|
||||
56
test/unit/unit_test.html
Normal file
56
test/unit/unit_test.html
Normal file
@@ -0,0 +1,56 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>PDF.js unit tests</title>
|
||||
|
||||
<link rel="stylesheet" type="text/css" href="../../node_modules/jasmine-core/lib/jasmine-core/jasmine.css">
|
||||
|
||||
<script src="../../node_modules/jasmine-core/lib/jasmine-core/jasmine.js"></script>
|
||||
<script src="../../node_modules/jasmine-core/lib/jasmine-core/jasmine-html.js"></script>
|
||||
|
||||
<script type="importmap">
|
||||
{
|
||||
"imports": {
|
||||
"pdfjs/": "../../src/",
|
||||
"pdfjs-lib": "../../src/pdf.js",
|
||||
"pdfjs-web/": "../../web/",
|
||||
"pdfjs-test/": "../",
|
||||
|
||||
"fluent-bundle": "../../node_modules/@fluent/bundle/esm/index.js",
|
||||
"fluent-dom": "../../node_modules/@fluent/dom/esm/index.js",
|
||||
"cached-iterable": "../../node_modules/cached-iterable/src/index.mjs",
|
||||
|
||||
"display-cmap_reader_factory": "../../src/display/cmap_reader_factory.js",
|
||||
"display-standard_fontdata_factory": "../../src/display/standard_fontdata_factory.js",
|
||||
"display-wasm_factory": "../../src/display/wasm_factory.js",
|
||||
"display-fetch_stream": "../../src/display/fetch_stream.js",
|
||||
"display-network": "../../src/display/network.js",
|
||||
"display-node_stream": "../../src/display/stubs.js",
|
||||
"display-node_utils": "../../src/display/stubs.js",
|
||||
|
||||
"web-alt_text_manager": "../../web/alt_text_manager.js",
|
||||
"web-annotation_editor_params": "../../web/annotation_editor_params.js",
|
||||
"web-download_manager": "../../web/download_manager.js",
|
||||
"web-external_services": "../../web/genericcom.js",
|
||||
"web-null_l10n": "../../web/genericl10n.js",
|
||||
"web-pdf_attachment_viewer": "../../web/pdf_attachment_viewer.js",
|
||||
"web-pdf_cursor_tools": "../../web/pdf_cursor_tools.js",
|
||||
"web-pdf_document_properties": "../../web/pdf_document_properties.js",
|
||||
"web-pdf_find_bar": "../../web/pdf_find_bar.js",
|
||||
"web-pdf_layer_viewer": "../../web/pdf_layer_viewer.js",
|
||||
"web-pdf_outline_viewer": "../../web/pdf_outline_viewer.js",
|
||||
"web-pdf_presentation_mode": "../../web/pdf_presentation_mode.js",
|
||||
"web-pdf_sidebar": "../../web/pdf_sidebar.js",
|
||||
"web-pdf_thumbnail_viewer": "../../web/pdf_thumbnail_viewer.js",
|
||||
"web-preferences": "../../web/genericcom.js",
|
||||
"web-print_service": "../../web/pdf_print_service.js",
|
||||
"web-secondary_toolbar": "../../web/secondary_toolbar.js",
|
||||
"web-toolbar": "../../web/toolbar.js"
|
||||
}
|
||||
}
|
||||
</script>
|
||||
<script src="jasmine-boot.js" type="module"></script>
|
||||
</head>
|
||||
<body>
|
||||
</body>
|
||||
</html>
|
||||
261
test/unit/util_spec.js
Normal file
261
test/unit/util_spec.js
Normal file
@@ -0,0 +1,261 @@
|
||||
/* Copyright 2017 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
BaseException,
|
||||
bytesToString,
|
||||
createValidAbsoluteUrl,
|
||||
getModificationDate,
|
||||
getUuid,
|
||||
string32,
|
||||
stringToBytes,
|
||||
stringToPDFString,
|
||||
} from "../../src/shared/util.js";
|
||||
|
||||
describe("util", function () {
|
||||
describe("BaseException", function () {
|
||||
it("can initialize exception classes derived from BaseException", function () {
|
||||
class DerivedException extends BaseException {
|
||||
constructor(message) {
|
||||
super(message, "DerivedException");
|
||||
this.foo = "bar";
|
||||
}
|
||||
}
|
||||
|
||||
const exception = new DerivedException("Something went wrong");
|
||||
expect(exception instanceof DerivedException).toEqual(true);
|
||||
expect(exception instanceof BaseException).toEqual(true);
|
||||
expect(exception.message).toEqual("Something went wrong");
|
||||
expect(exception.name).toEqual("DerivedException");
|
||||
expect(exception.foo).toEqual("bar");
|
||||
expect(exception.stack).toContain("BaseExceptionClosure");
|
||||
});
|
||||
});
|
||||
|
||||
describe("bytesToString", function () {
|
||||
it("handles non-array arguments", function () {
|
||||
expect(function () {
|
||||
bytesToString(null);
|
||||
}).toThrow(new Error("Invalid argument for bytesToString"));
|
||||
});
|
||||
|
||||
it("handles array arguments with a length not exceeding the maximum", function () {
|
||||
expect(bytesToString(new Uint8Array([]))).toEqual("");
|
||||
expect(bytesToString(new Uint8Array([102, 111, 111]))).toEqual("foo");
|
||||
});
|
||||
|
||||
it("handles array arguments with a length exceeding the maximum", function () {
|
||||
const length = 10000; // Larger than MAX_ARGUMENT_COUNT = 8192.
|
||||
|
||||
// Create an array with `length` 'a' character codes.
|
||||
const bytes = new Uint8Array(length);
|
||||
for (let i = 0; i < length; i++) {
|
||||
bytes[i] = "a".charCodeAt(0);
|
||||
}
|
||||
|
||||
// Create a string with `length` 'a' characters.
|
||||
const string = "a".repeat(length);
|
||||
|
||||
expect(bytesToString(bytes)).toEqual(string);
|
||||
});
|
||||
});
|
||||
|
||||
describe("string32", function () {
|
||||
it("converts unsigned 32-bit integers to strings", function () {
|
||||
expect(string32(0x74727565)).toEqual("true");
|
||||
expect(string32(0x74797031)).toEqual("typ1");
|
||||
expect(string32(0x4f54544f)).toEqual("OTTO");
|
||||
});
|
||||
});
|
||||
|
||||
describe("stringToBytes", function () {
|
||||
it("handles non-string arguments", function () {
|
||||
expect(function () {
|
||||
stringToBytes(null);
|
||||
}).toThrow(new Error("Invalid argument for stringToBytes"));
|
||||
});
|
||||
|
||||
it("handles string arguments", function () {
|
||||
expect(stringToBytes("")).toEqual(new Uint8Array([]));
|
||||
expect(stringToBytes("foo")).toEqual(new Uint8Array([102, 111, 111]));
|
||||
});
|
||||
});
|
||||
|
||||
describe("stringToPDFString", function () {
|
||||
it("handles ISO Latin 1 strings", function () {
|
||||
const str = "\x8Dstring\x8E";
|
||||
expect(stringToPDFString(str)).toEqual("\u201Cstring\u201D");
|
||||
});
|
||||
|
||||
it("handles UTF-16 big-endian strings", function () {
|
||||
const str = "\xFE\xFF\x00\x73\x00\x74\x00\x72\x00\x69\x00\x6E\x00\x67";
|
||||
expect(stringToPDFString(str)).toEqual("string");
|
||||
});
|
||||
|
||||
it("handles incomplete UTF-16 big-endian strings", function () {
|
||||
const str = "\xFE\xFF\x00\x73\x00\x74\x00\x72\x00\x69\x00\x6E\x00";
|
||||
expect(stringToPDFString(str)).toEqual("strin");
|
||||
});
|
||||
|
||||
it("handles UTF-16 little-endian strings", function () {
|
||||
const str = "\xFF\xFE\x73\x00\x74\x00\x72\x00\x69\x00\x6E\x00\x67\x00";
|
||||
expect(stringToPDFString(str)).toEqual("string");
|
||||
});
|
||||
|
||||
it("handles incomplete UTF-16 little-endian strings", function () {
|
||||
const str = "\xFF\xFE\x73\x00\x74\x00\x72\x00\x69\x00\x6E\x00\x67";
|
||||
expect(stringToPDFString(str)).toEqual("strin");
|
||||
});
|
||||
|
||||
it("handles UTF-8 strings", function () {
|
||||
const simpleStr = "\xEF\xBB\xBF\x73\x74\x72\x69\x6E\x67";
|
||||
expect(stringToPDFString(simpleStr)).toEqual("string");
|
||||
|
||||
const complexStr =
|
||||
"\xEF\xBB\xBF\xE8\xA1\xA8\xE3\x83\x9D\xE3\x81\x82\x41\xE9\xB7\x97" +
|
||||
"\xC5\x92\xC3\xA9\xEF\xBC\xA2\xE9\x80\x8D\xC3\x9C\xC3\x9F\xC2\xAA" +
|
||||
"\xC4\x85\xC3\xB1\xE4\xB8\x82\xE3\x90\x80\xF0\xA0\x80\x80";
|
||||
expect(stringToPDFString(complexStr)).toEqual(
|
||||
"表ポあA鷗ŒéB逍Üߪąñ丂㐀𠀀"
|
||||
);
|
||||
});
|
||||
|
||||
it("handles empty strings", function () {
|
||||
// ISO Latin 1
|
||||
const str1 = "";
|
||||
expect(stringToPDFString(str1)).toEqual("");
|
||||
|
||||
// UTF-16BE
|
||||
const str2 = "\xFE\xFF";
|
||||
expect(stringToPDFString(str2)).toEqual("");
|
||||
|
||||
// UTF-16LE
|
||||
const str3 = "\xFF\xFE";
|
||||
expect(stringToPDFString(str3)).toEqual("");
|
||||
|
||||
// UTF-8
|
||||
const str4 = "\xEF\xBB\xBF";
|
||||
expect(stringToPDFString(str4)).toEqual("");
|
||||
});
|
||||
|
||||
it("handles strings with language code", function () {
|
||||
// ISO Latin 1
|
||||
const str1 = "hello \x1benUS\x1bworld";
|
||||
expect(stringToPDFString(str1)).toEqual("hello world");
|
||||
|
||||
// UTF-16BE
|
||||
const str2 =
|
||||
"\xFE\xFF\x00h\x00e\x00l\x00l\x00o\x00 \x00\x1b\x00e\x00n\x00U\x00S\x00\x1b\x00w\x00o\x00r\x00l\x00d";
|
||||
expect(stringToPDFString(str2)).toEqual("hello world");
|
||||
|
||||
// UTF-16LE
|
||||
const str3 =
|
||||
"\xFF\xFEh\x00e\x00l\x00l\x00o\x00 \x00\x1b\x00e\x00n\x00U\x00S\x00\x1b\x00w\x00o\x00r\x00l\x00d\x00";
|
||||
expect(stringToPDFString(str3)).toEqual("hello world");
|
||||
});
|
||||
});
|
||||
|
||||
describe("ReadableStream", function () {
|
||||
it("should return an Object", function () {
|
||||
const readable = new ReadableStream();
|
||||
expect(typeof readable).toEqual("object");
|
||||
});
|
||||
|
||||
it("should have property getReader", function () {
|
||||
const readable = new ReadableStream();
|
||||
expect(typeof readable.getReader).toEqual("function");
|
||||
});
|
||||
});
|
||||
|
||||
describe("URL", function () {
|
||||
it("should return an Object", function () {
|
||||
const url = new URL("https://example.com");
|
||||
expect(typeof url).toEqual("object");
|
||||
});
|
||||
|
||||
it("should have property `href`", function () {
|
||||
const url = new URL("https://example.com");
|
||||
expect(typeof url.href).toEqual("string");
|
||||
});
|
||||
});
|
||||
|
||||
describe("createValidAbsoluteUrl", function () {
|
||||
it("handles invalid URLs", function () {
|
||||
expect(createValidAbsoluteUrl(undefined, undefined)).toEqual(null);
|
||||
expect(createValidAbsoluteUrl(null, null)).toEqual(null);
|
||||
expect(createValidAbsoluteUrl("/foo", "/bar")).toEqual(null);
|
||||
});
|
||||
|
||||
it("handles URLs that do not use an allowed protocol", function () {
|
||||
expect(createValidAbsoluteUrl("magnet:?foo", null)).toEqual(null);
|
||||
});
|
||||
|
||||
it("correctly creates a valid URL for allowed protocols", function () {
|
||||
// `http` protocol
|
||||
expect(
|
||||
createValidAbsoluteUrl("http://www.mozilla.org/foo", null)
|
||||
).toEqual(new URL("http://www.mozilla.org/foo"));
|
||||
expect(createValidAbsoluteUrl("/foo", "http://www.mozilla.org")).toEqual(
|
||||
new URL("http://www.mozilla.org/foo")
|
||||
);
|
||||
|
||||
// `https` protocol
|
||||
expect(
|
||||
createValidAbsoluteUrl("https://www.mozilla.org/foo", null)
|
||||
).toEqual(new URL("https://www.mozilla.org/foo"));
|
||||
expect(createValidAbsoluteUrl("/foo", "https://www.mozilla.org")).toEqual(
|
||||
new URL("https://www.mozilla.org/foo")
|
||||
);
|
||||
|
||||
// `ftp` protocol
|
||||
expect(createValidAbsoluteUrl("ftp://www.mozilla.org/foo", null)).toEqual(
|
||||
new URL("ftp://www.mozilla.org/foo")
|
||||
);
|
||||
expect(createValidAbsoluteUrl("/foo", "ftp://www.mozilla.org")).toEqual(
|
||||
new URL("ftp://www.mozilla.org/foo")
|
||||
);
|
||||
|
||||
// `mailto` protocol (base URLs have no meaning and should yield `null`)
|
||||
expect(createValidAbsoluteUrl("mailto:foo@bar.baz", null)).toEqual(
|
||||
new URL("mailto:foo@bar.baz")
|
||||
);
|
||||
expect(createValidAbsoluteUrl("/foo", "mailto:foo@bar.baz")).toEqual(
|
||||
null
|
||||
);
|
||||
|
||||
// `tel` protocol (base URLs have no meaning and should yield `null`)
|
||||
expect(createValidAbsoluteUrl("tel:+0123456789", null)).toEqual(
|
||||
new URL("tel:+0123456789")
|
||||
);
|
||||
expect(createValidAbsoluteUrl("/foo", "tel:0123456789")).toEqual(null);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getModificationDate", function () {
|
||||
it("should get a correctly formatted date", function () {
|
||||
const date = new Date(Date.UTC(3141, 5, 9, 2, 6, 53));
|
||||
expect(getModificationDate(date)).toEqual("31410609020653");
|
||||
expect(getModificationDate(date.toString())).toEqual("31410609020653");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getUuid", function () {
|
||||
it("should get uuid string", function () {
|
||||
const uuid = getUuid();
|
||||
expect(typeof uuid).toEqual("string");
|
||||
expect(uuid.length).toBeGreaterThanOrEqual(32);
|
||||
});
|
||||
});
|
||||
});
|
||||
329
test/unit/writer_spec.js
Normal file
329
test/unit/writer_spec.js
Normal file
@@ -0,0 +1,329 @@
|
||||
/* Copyright 2020 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { Dict, Name, Ref, RefSetCache } from "../../src/core/primitives.js";
|
||||
import { incrementalUpdate, writeDict } from "../../src/core/writer.js";
|
||||
import { bytesToString } from "../../src/shared/util.js";
|
||||
import { StringStream } from "../../src/core/stream.js";
|
||||
|
||||
describe("Writer", function () {
|
||||
beforeAll(function () {
|
||||
jasmine.clock().install();
|
||||
jasmine.clock().mockDate(new Date(0));
|
||||
});
|
||||
|
||||
afterAll(function () {
|
||||
jasmine.clock().uninstall();
|
||||
});
|
||||
|
||||
describe("Incremental update", function () {
|
||||
it("should update a file with new objects", async function () {
|
||||
const originalData = new Uint8Array();
|
||||
const changes = new RefSetCache();
|
||||
changes.put(Ref.get(123, 0x2d), { data: "abc\n" });
|
||||
changes.put(Ref.get(456, 0x4e), { data: "defg\n" });
|
||||
const xrefInfo = {
|
||||
newRef: Ref.get(789, 0),
|
||||
startXRef: 314,
|
||||
fileIds: ["id", ""],
|
||||
rootRef: null,
|
||||
infoRef: null,
|
||||
encryptRef: null,
|
||||
filename: "foo.pdf",
|
||||
infoMap: new Map(),
|
||||
};
|
||||
|
||||
let data = await incrementalUpdate({
|
||||
originalData,
|
||||
xrefInfo,
|
||||
changes,
|
||||
xref: {},
|
||||
useXrefStream: true,
|
||||
});
|
||||
data = bytesToString(data);
|
||||
|
||||
let expected =
|
||||
"\nabc\n" +
|
||||
"defg\n" +
|
||||
"789 0 obj\n" +
|
||||
"<< /Prev 314 /Size 790 /Type /XRef /Index [123 1 456 1 789 1] " +
|
||||
"/W [1 1 1] /ID [(id) (\xeb\x4b\x2a\xe7\x31\x36\xf0\xcd\x83\x35\x94\x2a\x36\xcf\xaa\xb0)] " +
|
||||
"/Length 9>> stream\n" +
|
||||
"\x01\x01\x2d" +
|
||||
"\x01\x05\x4e" +
|
||||
"\x01\x0a\x00\n" +
|
||||
"endstream\n" +
|
||||
"endobj\n" +
|
||||
"startxref\n" +
|
||||
"10\n" +
|
||||
"%%EOF\n";
|
||||
expect(data).toEqual(expected);
|
||||
|
||||
data = await incrementalUpdate({
|
||||
originalData,
|
||||
xrefInfo,
|
||||
changes,
|
||||
xref: {},
|
||||
useXrefStream: false,
|
||||
});
|
||||
data = bytesToString(data);
|
||||
|
||||
expected =
|
||||
"\nabc\n" +
|
||||
"defg\n" +
|
||||
"xref\n" +
|
||||
"123 1\n" +
|
||||
"0000000001 00045 n\r\n" +
|
||||
"456 1\n" +
|
||||
"0000000005 00078 n\r\n" +
|
||||
"789 1\n" +
|
||||
"0000000010 00000 n\r\n" +
|
||||
"trailer\n" +
|
||||
"<< /Prev 314 /Size 789 " +
|
||||
"/ID [(id) (\xeb\x4b\x2a\xe7\x31\x36\xf0\xcd\x83\x35\x94\x2a\x36\xcf\xaa\xb0)]>>\n" +
|
||||
"startxref\n" +
|
||||
"10\n" +
|
||||
"%%EOF\n";
|
||||
expect(data).toEqual(expected);
|
||||
});
|
||||
|
||||
it("should update a file, missing the /ID-entry, with new objects", async function () {
|
||||
const originalData = new Uint8Array();
|
||||
const changes = new RefSetCache();
|
||||
changes.put(Ref.get(123, 0x2d), { data: "abc\n" });
|
||||
const xrefInfo = {
|
||||
newRef: Ref.get(789, 0),
|
||||
startXRef: 314,
|
||||
fileIds: null,
|
||||
rootRef: null,
|
||||
infoRef: null,
|
||||
encryptRef: null,
|
||||
filename: "foo.pdf",
|
||||
infoMap: new Map(),
|
||||
};
|
||||
|
||||
let data = await incrementalUpdate({
|
||||
originalData,
|
||||
xrefInfo,
|
||||
changes,
|
||||
xref: {},
|
||||
useXrefStream: true,
|
||||
});
|
||||
data = bytesToString(data);
|
||||
|
||||
const expected =
|
||||
"\nabc\n" +
|
||||
"789 0 obj\n" +
|
||||
"<< /Prev 314 /Size 790 /Type /XRef /Index [123 1 789 1] " +
|
||||
"/W [1 1 1] /Length 6>> stream\n" +
|
||||
"\x01\x01\x2d" +
|
||||
"\x01\x05\x00\n" +
|
||||
"endstream\n" +
|
||||
"endobj\n" +
|
||||
"startxref\n" +
|
||||
"5\n" +
|
||||
"%%EOF\n";
|
||||
|
||||
expect(data).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe("writeDict", function () {
|
||||
it("should write a Dict", async function () {
|
||||
const dict = new Dict(null);
|
||||
dict.set("A", Name.get("B"));
|
||||
dict.set("B", Ref.get(123, 456));
|
||||
dict.set("C", 789);
|
||||
dict.set("D", "hello world");
|
||||
dict.set("E", "(hello\\world)");
|
||||
dict.set("F", [1.23001, 4.50001, 6]);
|
||||
|
||||
const gdict = new Dict(null);
|
||||
gdict.set("H", 123.00001);
|
||||
const string = "a stream";
|
||||
const stream = new StringStream(string);
|
||||
stream.dict = new Dict(null);
|
||||
stream.dict.set("Length", string.length);
|
||||
gdict.set("I", stream);
|
||||
|
||||
dict.set("G", gdict);
|
||||
dict.set("J", true);
|
||||
dict.set("K", false);
|
||||
|
||||
dict.set("NullArr", [null, 10]);
|
||||
dict.set("NullVal", null);
|
||||
|
||||
const buffer = [];
|
||||
await writeDict(dict, buffer, null);
|
||||
|
||||
const expected =
|
||||
"<< /A /B /B 123 456 R /C 789 /D (hello world) " +
|
||||
"/E (\\(hello\\\\world\\)) /F [1.23 4.5 6] " +
|
||||
"/G << /H 123 /I << /Length 8>> stream\n" +
|
||||
"a stream\n" +
|
||||
"endstream>> /J true /K false " +
|
||||
"/NullArr [null 10] /NullVal null>>";
|
||||
|
||||
expect(buffer.join("")).toEqual(expected);
|
||||
});
|
||||
|
||||
it("should write a Dict in escaping PDF names", async function () {
|
||||
const dict = new Dict(null);
|
||||
dict.set("\xfeA#", Name.get("hello"));
|
||||
dict.set("B", Name.get("#hello"));
|
||||
dict.set("C", Name.get("he\xfello\xff"));
|
||||
|
||||
const buffer = [];
|
||||
await writeDict(dict, buffer, null);
|
||||
|
||||
const expected = "<< /#feA#23 /hello /B /#23hello /C /he#fello#ff>>";
|
||||
|
||||
expect(buffer.join("")).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe("XFA", function () {
|
||||
it("should update AcroForm when no datasets in XFA array", async function () {
|
||||
const originalData = new Uint8Array();
|
||||
const changes = new RefSetCache();
|
||||
|
||||
const acroForm = new Dict(null);
|
||||
acroForm.set("XFA", [
|
||||
"preamble",
|
||||
Ref.get(123, 0),
|
||||
"postamble",
|
||||
Ref.get(456, 0),
|
||||
]);
|
||||
const acroFormRef = Ref.get(789, 0);
|
||||
const xfaDatasetsRef = Ref.get(101112, 0);
|
||||
const xfaData = "<hello>world</hello>";
|
||||
|
||||
const xrefInfo = {
|
||||
newRef: Ref.get(131415, 0),
|
||||
startXRef: 314,
|
||||
fileIds: null,
|
||||
rootRef: null,
|
||||
infoRef: null,
|
||||
encryptRef: null,
|
||||
filename: "foo.pdf",
|
||||
infoMap: new Map(),
|
||||
};
|
||||
|
||||
let data = await incrementalUpdate({
|
||||
originalData,
|
||||
xrefInfo,
|
||||
changes,
|
||||
hasXfa: true,
|
||||
xfaDatasetsRef,
|
||||
hasXfaDatasetsEntry: false,
|
||||
acroFormRef,
|
||||
acroForm,
|
||||
xfaData,
|
||||
xref: {},
|
||||
useXrefStream: true,
|
||||
});
|
||||
data = bytesToString(data);
|
||||
|
||||
const expected =
|
||||
"\n" +
|
||||
"789 0 obj\n" +
|
||||
"<< /XFA [(preamble) 123 0 R (datasets) 101112 0 R (postamble) 456 0 R]>>\n" +
|
||||
"endobj\n" +
|
||||
"101112 0 obj\n" +
|
||||
"<< /Type /EmbeddedFile /Length 20>> stream\n" +
|
||||
"<hello>world</hello>\n" +
|
||||
"endstream\n" +
|
||||
"endobj\n" +
|
||||
"131415 0 obj\n" +
|
||||
"<< /Prev 314 /Size 131416 /Type /XRef /Index [789 1 101112 1 131415 1] /W [1 1 0] /Length 6>> stream\n" +
|
||||
"\x01\x01\x01[\x01¹\n" +
|
||||
"endstream\n" +
|
||||
"endobj\n" +
|
||||
"startxref\n" +
|
||||
"185\n" +
|
||||
"%%EOF\n";
|
||||
|
||||
expect(data).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
it("should update a file with a deleted object", async function () {
|
||||
const originalData = new Uint8Array();
|
||||
const changes = new RefSetCache();
|
||||
changes.put(Ref.get(123, 0x2d), { data: null });
|
||||
changes.put(Ref.get(456, 0x4e), { data: "abc\n" });
|
||||
const xrefInfo = {
|
||||
newRef: Ref.get(789, 0),
|
||||
startXRef: 314,
|
||||
fileIds: ["id", ""],
|
||||
rootRef: null,
|
||||
infoRef: null,
|
||||
encryptRef: null,
|
||||
filename: "foo.pdf",
|
||||
infoMap: new Map(),
|
||||
};
|
||||
|
||||
let data = await incrementalUpdate({
|
||||
originalData,
|
||||
xrefInfo,
|
||||
changes,
|
||||
xref: {},
|
||||
useXrefStream: true,
|
||||
});
|
||||
data = bytesToString(data);
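// In the expected xref stream below, /W [1 1 1] entries hold type, offset (or next free object) and generation: object 123 is marked free with its generation bumped to 46 (0x2e), 456 is in use at offset 1 with generation 78 (0x4e), and the new xref stream 789 starts at offset 5.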
|
||||
|
||||
let expected =
|
||||
"\nabc\n" +
|
||||
"789 0 obj\n" +
|
||||
"<< /Prev 314 /Size 790 /Type /XRef /Index [123 1 456 1 789 1] " +
|
||||
"/W [1 1 1] /ID [(id) (\x5f\xd1\x43\x8e\xf8\x62\x79\x80\xbb\xd6\xf7\xb6\xd2\xb5\x6f\xd8)] " +
|
||||
"/Length 9>> stream\n" +
|
||||
"\x00\x00\x2e" +
|
||||
"\x01\x01\x4e" +
|
||||
"\x01\x05\x00\n" +
|
||||
"endstream\n" +
|
||||
"endobj\n" +
|
||||
"startxref\n" +
|
||||
"5\n" +
|
||||
"%%EOF\n";
|
||||
expect(data).toEqual(expected);
|
||||
|
||||
data = await incrementalUpdate({
|
||||
originalData,
|
||||
xrefInfo,
|
||||
changes,
|
||||
xref: {},
|
||||
useXrefStream: false,
|
||||
});
|
||||
data = bytesToString(data);
|
||||
|
||||
expected =
|
||||
"\nabc\n" +
|
||||
"xref\n" +
|
||||
"123 1\n" +
|
||||
"0000000000 00046 f\r\n" +
|
||||
"456 1\n" +
|
||||
"0000000001 00078 n\r\n" +
|
||||
"789 1\n" +
|
||||
"0000000005 00000 n\r\n" +
|
||||
"trailer\n" +
|
||||
"<< /Prev 314 /Size 789 " +
|
||||
"/ID [(id) (\x5f\xd1\x43\x8e\xf8\x62\x79\x80\xbb\xd6\xf7\xb6\xd2\xb5\x6f\xd8)]>>\n" +
|
||||
"startxref\n" +
|
||||
"5\n" +
|
||||
"%%EOF\n";
|
||||
expect(data).toEqual(expected);
|
||||
});
|
||||
});
|
||||
740
test/unit/xfa_formcalc_spec.js
Normal file
@@ -0,0 +1,740 @@
|
||||
/* Copyright 2020 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { Errors, Parser } from "../../src/core/xfa/formcalc_parser.js";
|
||||
import { Lexer, Token, TOKEN } from "../../src/core/xfa/formcalc_lexer.js";
|
||||
|
||||
describe("FormCalc expression parser", function () {
|
||||
const EOF = new Token(TOKEN.eof);
|
||||
|
||||
describe("FormCalc lexer", function () {
|
||||
it("should lex numbers", function () {
|
||||
const lexer = new Lexer(
|
||||
"1 7 12 1.2345 .7 .12345 1e-2 1.2E+3 1e2 1.2E3 nan 12. 2.e3 infinity 99999999999999999 123456789.012345678 9e99999"
|
||||
);
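// Oversized literals degrade gracefully: 99999999999999999 rounds to 1e17, 123456789.012345678 loses precision, and 9e99999 overflows to Infinity, as the expectations below show.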
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 1));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 7));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 12));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 1.2345));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 0.7));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 0.12345));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 1e-2));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 1.2e3));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 1e2));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 1.2e3));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, NaN));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 12));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 2e3));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, Infinity));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 100000000000000000));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 123456789.01234567));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, Infinity));
|
||||
expect(lexer.next()).toEqual(EOF);
|
||||
});
|
||||
|
||||
it("should lex strings", function () {
|
||||
const lexer = new Lexer(
|
||||
`"hello world" "hello ""world" "hello ""world"" ""world""""hello""" "hello \\uabcdeh \\Uabcd \\u00000123abc" "a \\a \\ub \\Uc \\b"`
|
||||
);
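// Doubled quotes escape a quote character inside a string literal, \u and \U escapes are decoded, and malformed escape sequences are kept verbatim.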
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.string, `hello world`));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.string, `hello "world`));
|
||||
expect(lexer.next()).toEqual(
|
||||
new Token(TOKEN.string, `hello "world" "world""hello"`)
|
||||
);
|
||||
expect(lexer.next()).toEqual(
|
||||
new Token(TOKEN.string, `hello \uabcdeh \uabcd \u0123abc`)
|
||||
);
|
||||
expect(lexer.next()).toEqual(
|
||||
new Token(TOKEN.string, `a \\a \\ub \\Uc \\b`)
|
||||
);
|
||||
expect(lexer.next()).toEqual(EOF);
|
||||
});
|
||||
|
||||
it("should lex operators", function () {
|
||||
const lexer = new Lexer("( , ) <= <> = == >= < > / * . .* .# [ ] & |");
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.leftParen));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.comma));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.rightParen));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.le));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.ne));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.assign));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.eq));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.ge));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.lt));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.gt));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.divide));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.times));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.dot));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.dotStar));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.dotHash));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.leftBracket));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.rightBracket));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.and));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.or));
|
||||
expect(lexer.next()).toEqual(EOF);
|
||||
});
|
||||
|
||||
it("should skip comments", function () {
|
||||
const lexer = new Lexer(`
|
||||
|
||||
\t\t 1 \r\n\r\n
|
||||
|
||||
; blah blah blah
|
||||
|
||||
2
|
||||
|
||||
// blah blah blah blah blah
|
||||
|
||||
|
||||
3
|
||||
`);
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 1));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 2));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.number, 3));
|
||||
expect(lexer.next()).toEqual(EOF);
|
||||
});
|
||||
|
||||
it("should lex identifiers", function () {
|
||||
const lexer = new Lexer(
|
||||
"eq for fore while continue hello こんにちは世界 $!hello今日は12今日は"
|
||||
);
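// "eq", "for", "while" and "continue" lex as dedicated tokens rather than identifiers, "fore" stays an ordinary identifier, and identifiers may contain non-ASCII characters.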
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.eq));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.for));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.identifier, "fore"));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.while));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.continue));
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.identifier, "hello"));
|
||||
expect(lexer.next()).toEqual(
|
||||
new Token(TOKEN.identifier, "こんにちは世界")
|
||||
);
|
||||
expect(lexer.next()).toEqual(new Token(TOKEN.identifier, "$"));
|
||||
expect(lexer.next()).toEqual(
|
||||
new Token(TOKEN.identifier, "!hello今日は12今日は")
|
||||
);
|
||||
expect(lexer.next()).toEqual(EOF);
|
||||
});
|
||||
});
|
||||
|
||||
describe("FormCalc parser", function () {
|
||||
it("should parse basic arithmetic expression", function () {
|
||||
const parser = new Parser("1 + 2 * 3");
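// Literal-only expressions are folded during parsing, so the dump is simply the number 7.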
|
||||
expect(parser.parse().dump()[0]).toEqual(7);
|
||||
});
|
||||
|
||||
it("should parse basic arithmetic expression with the same operator", function () {
|
||||
const parser = new Parser("1 + a + 3");
|
||||
expect(parser.parse().dump()[0]).toEqual({
|
||||
operator: "+",
|
||||
left: {
|
||||
operator: "+",
|
||||
left: 1,
|
||||
right: { id: "a" },
|
||||
},
|
||||
right: 3,
|
||||
});
|
||||
});
|
||||
|
||||
it("should parse expressions with unary operators", function () {
|
||||
const parser = new Parser(`
|
||||
s = +x + 1
|
||||
t = -+u * 2
|
||||
t = +-u * 2
|
||||
u = -foo()
|
||||
`);
|
||||
expect(parser.parse().dump()).toEqual([
|
||||
{
|
||||
assignment: "s",
|
||||
expr: {
|
||||
operator: "+",
|
||||
left: { operator: "+", arg: { id: "x" } },
|
||||
right: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
assignment: "t",
|
||||
expr: {
|
||||
operator: "*",
|
||||
left: {
|
||||
operator: "-",
|
||||
arg: {
|
||||
operator: "+",
|
||||
arg: { id: "u" },
|
||||
},
|
||||
},
|
||||
right: 2,
|
||||
},
|
||||
},
|
||||
{
|
||||
assignment: "t",
|
||||
expr: {
|
||||
operator: "*",
|
||||
left: {
|
||||
operator: "+",
|
||||
arg: {
|
||||
operator: "-",
|
||||
arg: { id: "u" },
|
||||
},
|
||||
},
|
||||
right: 2,
|
||||
},
|
||||
},
|
||||
{
|
||||
assignment: "u",
|
||||
expr: {
|
||||
operator: "-",
|
||||
arg: {
|
||||
callee: { id: "foo" },
|
||||
params: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("should parse basic expression with a string", function () {
|
||||
const parser = new Parser(`(5 - "abc") * 3`);
|
||||
expect(parser.parse().dump()[0]).toEqual(15);
|
||||
});
|
||||
|
||||
it("should parse basic expression with a calls", function () {
|
||||
const parser = new Parser(`foo(2, 3, a & b) or c * d + 1.234 / e`);
|
||||
expect(parser.parse().dump()[0]).toEqual({
|
||||
operator: "||",
|
||||
left: {
|
||||
callee: { id: "foo" },
|
||||
params: [
|
||||
2,
|
||||
3,
|
||||
{
|
||||
operator: "&&",
|
||||
left: { id: "a" },
|
||||
right: { id: "b" },
|
||||
},
|
||||
],
|
||||
},
|
||||
right: {
|
||||
operator: "+",
|
||||
left: {
|
||||
operator: "*",
|
||||
left: { id: "c" },
|
||||
right: { id: "d" },
|
||||
},
|
||||
right: {
|
||||
operator: "/",
|
||||
left: 1.234,
|
||||
right: { id: "e" },
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should parse basic expression with a subscript", function () {
|
||||
let parser = new Parser(`こんにちは世界[-0]`);
|
||||
let dump = parser.parse().dump()[0];
|
||||
expect(dump).toEqual({
|
||||
operand: { id: "こんにちは世界" },
|
||||
index: -0,
|
||||
});
|
||||
expect(Object.is(-0, dump.index)).toBe(true);
|
||||
|
||||
parser = new Parser(`こんにちは世界[+0]`);
|
||||
dump = parser.parse().dump()[0];
|
||||
expect(dump).toEqual({
|
||||
operand: { id: "こんにちは世界" },
|
||||
index: +0,
|
||||
});
|
||||
expect(Object.is(+0, dump.index)).toBe(true);
|
||||
|
||||
parser = new Parser(`こんにちは世界[*]`);
|
||||
expect(parser.parse().dump()[0]).toEqual({
|
||||
operand: { id: "こんにちは世界" },
|
||||
index: { special: "*" },
|
||||
});
|
||||
});
|
||||
|
||||
it("should parse basic expression with dots", function () {
|
||||
const parser = new Parser("a.b.c.#d..e.f..g.*");
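// The expected dump below shows the dot operators (., .., .#, .*) combined into a right-nested tree, with "*" represented as a special node.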
|
||||
const exprlist = parser.parse();
|
||||
expect(exprlist.expressions[0].isDotExpression()).toEqual(true);
|
||||
expect(exprlist.dump()[0]).toEqual({
|
||||
operator: ".",
|
||||
left: { id: "a" },
|
||||
right: {
|
||||
operator: ".",
|
||||
left: { id: "b" },
|
||||
right: {
|
||||
operator: ".#",
|
||||
left: { id: "c" },
|
||||
right: {
|
||||
operator: "..",
|
||||
left: { id: "d" },
|
||||
right: {
|
||||
operator: ".",
|
||||
left: { id: "e" },
|
||||
right: {
|
||||
operator: "..",
|
||||
left: { id: "f" },
|
||||
right: {
|
||||
operator: ".",
|
||||
left: { id: "g" },
|
||||
right: { special: "*" },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should parse var declaration with error", function () {
|
||||
let parser = new Parser("var 123 = a");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.var));
|
||||
|
||||
parser = new Parser(`var "123" = a`);
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.var));
|
||||
|
||||
parser = new Parser(`var for var a`);
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.var));
|
||||
});
|
||||
|
||||
it("should parse for declaration with a step", function () {
|
||||
const parser = new Parser(`
|
||||
var s = 0
|
||||
for var i = 1 upto 10 + x step 1 do
|
||||
s = s + i * 2
|
||||
endfor`);
|
||||
expect(parser.parse().dump()).toEqual([
|
||||
{
|
||||
var: "s",
|
||||
expr: 0,
|
||||
},
|
||||
{
|
||||
decl: "for",
|
||||
assignment: {
|
||||
var: "i",
|
||||
expr: 1,
|
||||
},
|
||||
type: "upto",
|
||||
end: {
|
||||
operator: "+",
|
||||
left: 10,
|
||||
right: { id: "x" },
|
||||
},
|
||||
step: 1,
|
||||
body: [
|
||||
{
|
||||
assignment: "s",
|
||||
expr: {
|
||||
operator: "+",
|
||||
left: { id: "s" },
|
||||
right: {
|
||||
operator: "*",
|
||||
left: { id: "i" },
|
||||
right: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("should parse for declaration without a step", function () {
|
||||
const parser = new Parser(`
|
||||
for i = 1 + 2 downto 10 do
|
||||
s = foo()
|
||||
endfor`);
|
||||
expect(parser.parse().dump()).toEqual([
|
||||
{
|
||||
decl: "for",
|
||||
assignment: {
|
||||
assignment: "i",
|
||||
expr: 3,
|
||||
},
|
||||
type: "downto",
|
||||
end: 10,
|
||||
step: null,
|
||||
body: [
|
||||
{
|
||||
assignment: "s",
|
||||
expr: {
|
||||
callee: { id: "foo" },
|
||||
params: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("should parse for declaration with error", function () {
|
||||
let parser = new Parser("for 123 = i upto 1 do a = 1 endfor");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.assignment));
|
||||
|
||||
parser = new Parser("for var 123 = i upto 1 do a = 1 endfor");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.assignment));
|
||||
|
||||
parser = new Parser("for var i = 123 upt 1 do a = 1 endfor");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.for));
|
||||
|
||||
parser = new Parser("for var i = 123 var 1 do a = 1 endfor");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.for));
|
||||
|
||||
parser = new Parser(
|
||||
"for var i = 123 upto 1 step for var j = 1 do endfor do a = 1 endfor"
|
||||
);
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.for));
|
||||
|
||||
parser = new Parser("for var i = 123 downto 1 do a = 1 endfunc");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.for));
|
||||
|
||||
parser = new Parser("for var i = 123 downto 1 do a = 1");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.for));
|
||||
});
|
||||
|
||||
it("should parse foreach declaration", function () {
|
||||
const parser = new Parser(`
|
||||
foreach i in (a, b, c, d) do
|
||||
s = foo()[i]
|
||||
endfor`);
|
||||
expect(parser.parse().dump()).toEqual([
|
||||
{
|
||||
decl: "foreach",
|
||||
id: "i",
|
||||
params: [{ id: "a" }, { id: "b" }, { id: "c" }, { id: "d" }],
|
||||
body: [
|
||||
{
|
||||
assignment: "s",
|
||||
expr: {
|
||||
operand: {
|
||||
callee: { id: "foo" },
|
||||
params: [],
|
||||
},
|
||||
index: { id: "i" },
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("should parse foreach declaration with error", function () {
|
||||
let parser = new Parser("foreach 123 in (1, 2, 3) do a = 1 endfor");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.foreach));
|
||||
|
||||
parser = new Parser("foreach foo in 1, 2, 3) do a = 1 endfor");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.foreach));
|
||||
|
||||
parser = new Parser("foreach foo in (1, 2, 3 do a = 1 endfor");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.params));
|
||||
|
||||
parser = new Parser("foreach foo in (1, 2 3) do a = 1 endfor");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.params));
|
||||
|
||||
parser = new Parser("foreach foo in (1, 2, 3) od a = 1 endfor");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.foreach));
|
||||
|
||||
parser = new Parser("foreach foo in (1, 2, 3) do a = 1 endforeach");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.foreach));
|
||||
|
||||
parser = new Parser("foreach foo in (1, 2, 3) do a = 1 123");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.foreach));
|
||||
});
|
||||
|
||||
it("should parse while declaration", function () {
|
||||
const parser = new Parser(`
|
||||
while (1) do
|
||||
if (0) then
|
||||
break
|
||||
else
|
||||
continue
|
||||
endif
|
||||
endwhile
|
||||
`);
|
||||
expect(parser.parse().dump()).toEqual([
|
||||
{
|
||||
decl: "while",
|
||||
condition: 1,
|
||||
body: [
|
||||
{
|
||||
decl: "if",
|
||||
condition: 0,
|
||||
then: [{ special: "break" }],
|
||||
elseif: null,
|
||||
else: [{ special: "continue" }],
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("should parse while declaration with error", function () {
|
||||
let parser = new Parser("while a == 1 do a = 2 endwhile");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.while));
|
||||
|
||||
parser = new Parser("while (a == 1 do a = 2 endwhile");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.while));
|
||||
|
||||
parser = new Parser("while (a == 1) var a = 2 endwhile");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.while));
|
||||
|
||||
parser = new Parser("while (a == 1) do var a = 2 end");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.while));
|
||||
});
|
||||
|
||||
it("should parse do declaration", function () {
|
||||
const parser = new Parser(`
|
||||
do
|
||||
x = 1
|
||||
; a comment in the middle of the block
|
||||
y = 2
|
||||
end
|
||||
`);
|
||||
expect(parser.parse().dump()).toEqual([
|
||||
{
|
||||
decl: "block",
|
||||
body: [
|
||||
{
|
||||
assignment: "x",
|
||||
expr: 1,
|
||||
},
|
||||
{
|
||||
assignment: "y",
|
||||
expr: 2,
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("should parse do declaration with error", function () {
|
||||
const parser = new Parser(`
|
||||
do
|
||||
x = 1
|
||||
y = 2
|
||||
endfunc
|
||||
`);
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.block));
|
||||
});
|
||||
|
||||
it("should parse func declaration", function () {
|
||||
const parser = new Parser(`
|
||||
func こんにちは世界123(a, b) do
|
||||
a + b
|
||||
endfunc
|
||||
`);
|
||||
expect(parser.parse().dump()).toEqual([
|
||||
{
|
||||
func: "こんにちは世界123",
|
||||
params: ["a", "b"],
|
||||
body: [
|
||||
{
|
||||
operator: "+",
|
||||
left: { id: "a" },
|
||||
right: { id: "b" },
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("should parse func declaration with error", function () {
|
||||
let parser = new Parser("func 123(a, b) do a = 1 endfunc");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.func));
|
||||
|
||||
parser = new Parser("func foo(a, b) for a = 1 endfunc");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.func));
|
||||
|
||||
parser = new Parser("func foo(a, b) do a = 1 endfun");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.func));
|
||||
|
||||
parser = new Parser("func foo(a, b, c do a = 1 endfunc");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.func));
|
||||
|
||||
parser = new Parser("func foo(a, b, 123) do a = 1 endfunc");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.func));
|
||||
});
|
||||
|
||||
it("should parse if declaration", function () {
|
||||
const parser = new Parser(`
|
||||
if (a & b) then
|
||||
var s = 1
|
||||
endif
|
||||
|
||||
if (a or b) then
|
||||
var s = 1
|
||||
else
|
||||
var x = 2
|
||||
endif
|
||||
|
||||
if (0) then
|
||||
s = 1
|
||||
elseif (1) then
|
||||
s = 2
|
||||
elseif (2) then
|
||||
s = 3
|
||||
elseif (3) then
|
||||
s = 4
|
||||
else
|
||||
s = 5
|
||||
endif
|
||||
|
||||
// a comment
|
||||
|
||||
if (0) then
|
||||
s = 1
|
||||
elseif (1) then
|
||||
s = 2
|
||||
endif
|
||||
`);
|
||||
expect(parser.parse().dump()).toEqual([
|
||||
{
|
||||
decl: "if",
|
||||
condition: {
|
||||
operator: "&&",
|
||||
left: { id: "a" },
|
||||
right: { id: "b" },
|
||||
},
|
||||
then: [
|
||||
{
|
||||
var: "s",
|
||||
expr: 1,
|
||||
},
|
||||
],
|
||||
elseif: null,
|
||||
else: null,
|
||||
},
|
||||
{
|
||||
decl: "if",
|
||||
condition: {
|
||||
operator: "||",
|
||||
left: { id: "a" },
|
||||
right: { id: "b" },
|
||||
},
|
||||
then: [
|
||||
{
|
||||
var: "s",
|
||||
expr: 1,
|
||||
},
|
||||
],
|
||||
elseif: null,
|
||||
else: [
|
||||
{
|
||||
var: "x",
|
||||
expr: 2,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
decl: "if",
|
||||
condition: 0,
|
||||
then: [
|
||||
{
|
||||
assignment: "s",
|
||||
expr: 1,
|
||||
},
|
||||
],
|
||||
elseif: [
|
||||
{
|
||||
decl: "elseif",
|
||||
condition: 1,
|
||||
then: [
|
||||
{
|
||||
assignment: "s",
|
||||
expr: 2,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
decl: "elseif",
|
||||
condition: 2,
|
||||
then: [
|
||||
{
|
||||
assignment: "s",
|
||||
expr: 3,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
decl: "elseif",
|
||||
condition: 3,
|
||||
then: [
|
||||
{
|
||||
assignment: "s",
|
||||
expr: 4,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
else: [
|
||||
{
|
||||
assignment: "s",
|
||||
expr: 5,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
decl: "if",
|
||||
condition: 0,
|
||||
then: [
|
||||
{
|
||||
assignment: "s",
|
||||
expr: 1,
|
||||
},
|
||||
],
|
||||
elseif: [
|
||||
{
|
||||
decl: "elseif",
|
||||
condition: 1,
|
||||
then: [
|
||||
{
|
||||
assignment: "s",
|
||||
expr: 2,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
else: null,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("should parse if declaration with error", function () {
|
||||
let parser = new Parser("if foo == 1 then a = 1 endif");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.if));
|
||||
|
||||
parser = new Parser("if (foo == 1 then a = 1 endif");
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.if));
|
||||
|
||||
parser = new Parser(
|
||||
"if (foo == 1) then a = 1 elseiff (foo == 2) then a = 2 endif"
|
||||
);
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.if));
|
||||
|
||||
parser = new Parser(
|
||||
"if (foo == 1) then a = 1 elseif (foo == 2) then a = 2 end"
|
||||
);
|
||||
expect(() => parser.parse()).toThrow(new Error(Errors.if));
|
||||
});
|
||||
|
||||
it("should parse som predicate", () => {
|
||||
const parser = new Parser("a.b <= 3");
|
||||
const expr = parser.parse().expressions[0];
|
||||
expect(expr.isSomPredicate()).toEqual(true);
|
||||
expect(expr.left.isSomPredicate()).toEqual(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
1541
test/unit/xfa_parser_spec.js
Normal file
File diff suppressed because it is too large
75
test/unit/xfa_serialize_data_spec.js
Normal file
@@ -0,0 +1,75 @@
|
||||
/* Copyright 2021 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { $uid } from "../../src/core/xfa/symbol_utils.js";
|
||||
import { DataHandler } from "../../src/core/xfa/data.js";
|
||||
import { searchNode } from "../../src/core/xfa/som.js";
|
||||
import { XFAParser } from "../../src/core/xfa/parser.js";
|
||||
|
||||
describe("Data serializer", function () {
|
||||
it("should serialize data with an annotationStorage", function () {
|
||||
const xml = `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<foo>bar</foo>
|
||||
<xfa:data>
|
||||
<Receipt>
|
||||
<Page>1</Page>
|
||||
<Detail PartNo="GS001">
|
||||
<Description>Giant Slingshot</Description>
|
||||
<Units>1</Units>
|
||||
<Unit_Price>250.00</Unit_Price>
|
||||
<Total_Price>250.00</Total_Price>
|
||||
<àé></àé>
|
||||
</Detail>
|
||||
<Page>2</Page>
|
||||
<Detail PartNo="RRB-LB">
|
||||
<Description>Road Runner Bait, large bag</Description>
|
||||
<Units>5</Units>
|
||||
<Unit_Price>12.00</Unit_Price>
|
||||
<Total_Price>60.00</Total_Price>
|
||||
</Detail>
|
||||
<Sub_Total>310.00</Sub_Total>
|
||||
<Tax>24.80</Tax>
|
||||
<Total_Price>334.80</Total_Price>
|
||||
</Receipt>
|
||||
</xfa:data>
|
||||
<bar>foo</bar>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
|
||||
const root = new XFAParser().parse(xml);
|
||||
const data = root.datasets.data;
|
||||
const dataHandler = new DataHandler(root, data);
|
||||
|
||||
const storage = new Map();
|
||||
for (const [path, value] of [
|
||||
["Receipt.Detail[0].Units", "12&3"],
|
||||
["Receipt.Detail[0].Unit_Price", "456>"],
|
||||
["Receipt.Detail[0].Total_Price", "789"],
|
||||
["Receipt.Detail[0].àé", "1011"],
|
||||
["Receipt.Detail[1].PartNo", "foo-bar😀"],
|
||||
["Receipt.Detail[1].Description", "hello world"],
|
||||
]) {
|
||||
storage.set(searchNode(root, data, path)[0][$uid], { value });
|
||||
}
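// Storage entries are keyed by each data node's $uid; in the serialized output below, "&" and ">" are XML-escaped (12&amp;3, 456&gt;) and the "àé" element name is written as raw UTF-8 bytes.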
|
||||
|
||||
const serialized = dataHandler.serialize(storage);
|
||||
const expected = `<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/"><foo>bar</foo><bar>foo</bar><xfa:data><Receipt><Page>1</Page><Detail PartNo="GS001"><Description>Giant Slingshot</Description><Units>12&3</Units><Unit_Price>456></Unit_Price><Total_Price>789</Total_Price><\xC3\xA0\xC3\xA9>1011</\xC3\xA0\xC3\xA9></Detail><Page>2</Page><Detail PartNo="foo-bar😀"><Description>hello world</Description><Units>5</Units><Unit_Price>12.00</Unit_Price><Total_Price>60.00</Total_Price></Detail><Sub_Total>310.00</Sub_Total><Tax>24.80</Tax><Total_Price>334.80</Total_Price></Receipt></xfa:data></xfa:datasets>`;
|
||||
|
||||
expect(serialized).toEqual(expected);
|
||||
});
|
||||
});
|
||||
686
test/unit/xfa_tohtml_spec.js
Normal file
@@ -0,0 +1,686 @@
|
||||
/* Copyright 2020 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { XFAFactory } from "../../src/core/xfa/factory.js";
|
||||
|
||||
describe("XFAFactory", function () {
|
||||
function searchHtmlNode(root, name, value, byAttributes = false, nth = [0]) {
|
||||
if (
|
||||
(!byAttributes && root[name] === value) ||
|
||||
(byAttributes && root.attributes?.[name] === value)
|
||||
) {
|
||||
if (nth[0]-- === 0) {
|
||||
return root;
|
||||
}
|
||||
}
|
||||
if (!root.children) {
|
||||
return null;
|
||||
}
|
||||
for (const child of root.children) {
|
||||
const node = searchHtmlNode(child, name, value, byAttributes, nth);
|
||||
if (node) {
|
||||
return node;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
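// Helper: depth-first search of the generated HTML tree, matching either a direct property (e.g. name) or an attribute, and returning the nth match.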
|
||||
|
||||
describe("toHTML", function () {
|
||||
it("should convert some basic properties to CSS", async () => {
|
||||
const xml = `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<template xmlns="http://www.xfa.org/schema/xfa-template/3.3">
|
||||
<subform name="root" mergeMode="matchTemplate">
|
||||
<pageSet>
|
||||
<pageArea>
|
||||
<contentArea x="123pt" w="456pt" h="789pt"/>
|
||||
<medium stock="default" short="456pt" long="789pt"/>
|
||||
<draw y="1pt" w="11pt" h="22pt" rotate="90" x="2pt">
|
||||
<assist><toolTip>A tooltip !!</toolTip></assist>
|
||||
<font size="7pt" typeface="FooBar" baselineShift="2pt">
|
||||
<fill>
|
||||
<color value="12,23,34"/>
|
||||
<solid/>
|
||||
</fill>
|
||||
</font>
|
||||
<value/>
|
||||
<margin topInset="1pt" bottomInset="2pt" leftInset="3pt" rightInset="4pt"/>
|
||||
<para spaceAbove="1pt" spaceBelow="2pt" textIndent="3pt" marginLeft="4pt" marginRight="5pt"/>
|
||||
</draw>
|
||||
</pageArea>
|
||||
</pageSet>
|
||||
<subform name="second">
|
||||
<breakBefore targetType="pageArea" startNew="1"/>
|
||||
<subform>
|
||||
<draw w="1pt" h="1pt"><value><text>foo</text></value></draw>
|
||||
</subform>
|
||||
</subform>
|
||||
<subform name="third">
|
||||
<breakBefore targetType="pageArea" startNew="1"/>
|
||||
<subform>
|
||||
<draw w="1pt" h="1pt"><value><text>bar</text></value></draw>
|
||||
</subform>
|
||||
</subform>
|
||||
</subform>
|
||||
</template>
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<xfa:data>
|
||||
</xfa:data>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
|
||||
const factory = new XFAFactory({ "xdp:xdp": xml });
|
||||
factory.setFonts([]);
|
||||
|
||||
expect(await factory.getNumPages()).toEqual(2);
|
||||
|
||||
const pages = await factory.getPages();
|
||||
const page1 = pages.children[0];
|
||||
expect(page1.attributes.style).toEqual({
|
||||
height: "789px",
|
||||
width: "456px",
|
||||
});
|
||||
|
||||
expect(page1.children.length).toEqual(2);
|
||||
const container = page1.children[1];
|
||||
expect(container.attributes.class).toEqual(["xfaContentarea"]);
|
||||
expect(container.attributes.style).toEqual({
|
||||
height: "789px",
|
||||
width: "456px",
|
||||
left: "123px",
|
||||
top: "0px",
|
||||
});
|
||||
|
||||
const wrapper = page1.children[0];
|
||||
const draw = wrapper.children[0];
|
||||
|
||||
expect(wrapper.attributes.class).toEqual(["xfaWrapper"]);
|
||||
expect(wrapper.attributes.style).toEqual({
|
||||
alignSelf: "start",
|
||||
height: "22px",
|
||||
left: "2px",
|
||||
position: "absolute",
|
||||
top: "1px",
|
||||
transform: "rotate(-90deg)",
|
||||
transformOrigin: "top left",
|
||||
width: "11px",
|
||||
});
|
||||
|
||||
expect(draw.attributes.class).toEqual([
|
||||
"xfaDraw",
|
||||
"xfaFont",
|
||||
"xfaWrapped",
|
||||
]);
|
||||
expect(draw.attributes.title).toEqual("A tooltip !!");
|
||||
expect(draw.attributes.style).toEqual({
|
||||
color: "#0c1722",
|
||||
fontFamily: '"FooBar"',
|
||||
fontKerning: "none",
|
||||
letterSpacing: "0px",
|
||||
fontStyle: "normal",
|
||||
fontWeight: "normal",
|
||||
fontSize: "6.93px",
|
||||
padding: "1px 4px 2px 3px",
|
||||
verticalAlign: "2px",
|
||||
});
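// Dimensions given in pt come back as px values of the same magnitude, while the 7pt font size is scaled slightly to 6.93px.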
|
||||
|
||||
// draw element must be on each page.
|
||||
expect(draw.attributes.style).toEqual(
|
||||
pages.children[1].children[0].children[0].attributes.style
|
||||
);
|
||||
});
|
||||
|
||||
it("should have an alt attribute from toolTip", async () => {
|
||||
const xml = `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<template xmlns="http://www.xfa.org/schema/xfa-template/3.3">
|
||||
<subform name="root" mergeMode="matchTemplate">
|
||||
<pageSet>
|
||||
<pageArea>
|
||||
<contentArea x="0pt" w="456pt" h="789pt"/>
|
||||
<draw name="BA-Logo" y="5.928mm" x="128.388mm" w="71.237mm" h="9.528mm">
|
||||
<value>
|
||||
<image contentType="image/png">iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVQYV2NgYAAAAAMAAWgmWQ0AAAAASUVORK5CYII=</image>
|
||||
</value>
|
||||
<assist><toolTip>alt text</toolTip></assist>
|
||||
</draw>
|
||||
</pageArea>
|
||||
</pageSet>
|
||||
</subform>
|
||||
</template>
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<xfa:data>
|
||||
</xfa:data>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
|
||||
const factory = new XFAFactory({ "xdp:xdp": xml });
|
||||
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
|
||||
const pages = await factory.getPages();
|
||||
const field = searchHtmlNode(pages, "name", "img");
|
||||
|
||||
expect(field.attributes.alt).toEqual("alt text");
|
||||
});
|
||||
|
||||
it("should have a aria heading role and level", async () => {
|
||||
const xml = `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<template xmlns="http://www.xfa.org/schema/xfa-template/3.3">
|
||||
<subform name="root" mergeMode="matchTemplate">
|
||||
<pageSet>
|
||||
<pageArea>
|
||||
<contentArea x="0pt" w="456pt" h="789pt"/>
|
||||
<medium stock="default" short="456pt" long="789pt"/>
|
||||
<draw name="BA-Logo" y="5.928mm" x="128.388mm" w="71.237mm" h="9.528mm">
|
||||
<value><text>foo</text></value>
|
||||
<assist role="H2"></assist>
|
||||
</draw>
|
||||
</pageArea>
|
||||
</pageSet>
|
||||
</subform>
|
||||
</template>
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<xfa:data>
|
||||
</xfa:data>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
|
||||
const factory = new XFAFactory({ "xdp:xdp": xml });
|
||||
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
|
||||
const pages = await factory.getPages();
|
||||
const page1 = pages.children[0];
|
||||
const wrapper = page1.children[0];
|
||||
const draw = wrapper.children[0];
|
||||
|
||||
expect(draw.attributes.role).toEqual("heading");
|
||||
expect(draw.attributes["aria-level"]).toEqual("2");
|
||||
});
|
||||
|
||||
it("should have aria table role", async () => {
|
||||
const xml = `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<template xmlns="http://www.xfa.org/schema/xfa-template/3.3">
|
||||
<subform name="root" mergeMode="matchTemplate">
|
||||
<pageSet>
|
||||
<pageArea>
|
||||
<contentArea x="0pt" w="456pt" h="789pt"/>
|
||||
<medium stock="default" short="456pt" long="789pt"/>
|
||||
<font size="7pt" typeface="FooBar" baselineShift="2pt">
|
||||
</font>
|
||||
</pageArea>
|
||||
</pageSet>
|
||||
<subform name="table" mergeMode="matchTemplate" layout="table">
|
||||
<subform layout="row" name="row1">
|
||||
<assist role="TH"></assist>
|
||||
<draw name="header1" y="5.928mm" x="128.388mm" w="71.237mm" h="9.528mm">
|
||||
<value><text>Header Col 1</text></value>
|
||||
</draw>
|
||||
<draw name="header2" y="5.928mm" x="128.388mm" w="71.237mm" h="9.528mm">
|
||||
<value><text>Header Col 2</text></value>
|
||||
</draw>
|
||||
</subform>
|
||||
<subform layout="row" name="row2">
|
||||
<draw name="cell1" y="5.928mm" x="128.388mm" w="71.237mm" h="9.528mm">
|
||||
<value><text>Cell 1</text></value>
|
||||
</draw>
|
||||
<draw name="cell2" y="5.928mm" x="128.388mm" w="71.237mm" h="9.528mm">
|
||||
<value><text>Cell 2</text></value>
|
||||
</draw>
|
||||
</subform>
|
||||
</subform>
|
||||
</subform>
|
||||
</template>
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<xfa:data>
|
||||
</xfa:data>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
|
||||
const factory = new XFAFactory({ "xdp:xdp": xml });
|
||||
factory.setFonts([]);
|
||||
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
|
||||
const pages = await factory.getPages();
|
||||
const table = searchHtmlNode(
|
||||
pages,
|
||||
"xfaName",
|
||||
"table",
|
||||
/* byAttributes */ true
|
||||
);
|
||||
expect(table.attributes.role).toEqual("table");
|
||||
const headerRow = searchHtmlNode(
|
||||
pages,
|
||||
"xfaName",
|
||||
"row1",
|
||||
/* byAttributes */ true
|
||||
);
|
||||
expect(headerRow.attributes.role).toEqual("row");
|
||||
const headerCell = searchHtmlNode(
|
||||
pages,
|
||||
"xfaName",
|
||||
"header2",
|
||||
/* byAttributes */ true
|
||||
);
|
||||
expect(headerCell.attributes.role).toEqual("columnheader");
|
||||
const row = searchHtmlNode(
|
||||
pages,
|
||||
"xfaName",
|
||||
"row2",
|
||||
/* byAttributes */ true
|
||||
);
|
||||
expect(row.attributes.role).toEqual("row");
|
||||
const cell = searchHtmlNode(
|
||||
pages,
|
||||
"xfaName",
|
||||
"cell2",
|
||||
/* byAttributes */ true
|
||||
);
|
||||
expect(cell.attributes.role).toEqual("cell");
|
||||
});
|
||||
|
||||
it("should have a maxLength property", async () => {
|
||||
const xml = `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<template xmlns="http://www.xfa.org/schema/xfa-template/3.3">
|
||||
<subform name="root" mergeMode="matchTemplate">
|
||||
<pageSet>
|
||||
<pageArea>
|
||||
<contentArea x="0pt" w="456pt" h="789pt"/>
|
||||
<medium stock="default" short="456pt" long="789pt"/>
|
||||
<field y="1pt" w="11pt" h="22pt" x="2pt">
|
||||
<ui>
|
||||
<textEdit multiLine="0"/>
|
||||
</ui>
|
||||
<value>
|
||||
<text maxChars="123"/>
|
||||
</value>
|
||||
</field>
|
||||
</pageArea>
|
||||
</pageSet>
|
||||
<subform name="first">
|
||||
<draw w="1pt" h="1pt"><value><text>foo</text></value></draw>
|
||||
</subform>
|
||||
</subform>
|
||||
</template>
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<xfa:data>
|
||||
</xfa:data>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
|
||||
const factory = new XFAFactory({ "xdp:xdp": xml });
|
||||
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
|
||||
const pages = await factory.getPages();
|
||||
const field = searchHtmlNode(pages, "name", "input");
|
||||
|
||||
expect(field.attributes.maxLength).toEqual(123);
|
||||
});
|
||||
|
||||
it("should have an aria-label property from speak", async () => {
|
||||
const xml = `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<template xmlns="http://www.xfa.org/schema/xfa-template/3.3">
|
||||
<subform name="root" mergeMode="matchTemplate">
|
||||
<pageSet>
|
||||
<pageArea>
|
||||
<contentArea x="0pt" w="456pt" h="789pt"/>
|
||||
<medium stock="default" short="456pt" long="789pt"/>
|
||||
<field y="1pt" w="11pt" h="22pt" x="2pt">
|
||||
<assist><speak>Screen Reader</speak></assist>
|
||||
<ui>
|
||||
<textEdit multiLine="0"/>
|
||||
</ui>
|
||||
<value>
|
||||
<text maxChars="123"/>
|
||||
</value>
|
||||
</field>
|
||||
</pageArea>
|
||||
</pageSet>
|
||||
<subform name="first">
|
||||
<draw w="1pt" h="1pt"><value><text>foo</text></value></draw>
|
||||
</subform>
|
||||
</subform>
|
||||
</template>
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<xfa:data>
|
||||
</xfa:data>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
|
||||
const factory = new XFAFactory({ "xdp:xdp": xml });
|
||||
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
|
||||
const pages = await factory.getPages();
|
||||
const field = searchHtmlNode(pages, "name", "input");
|
||||
|
||||
expect(field.attributes["aria-label"]).toEqual("Screen Reader");
|
||||
});
|
||||
|
||||
it("should have an aria-label property from toolTip", async () => {
|
||||
const xml = `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<template xmlns="http://www.xfa.org/schema/xfa-template/3.3">
|
||||
<subform name="root" mergeMode="matchTemplate">
|
||||
<pageSet>
|
||||
<pageArea>
|
||||
<contentArea x="0pt" w="456pt" h="789pt"/>
|
||||
<medium stock="default" short="456pt" long="789pt"/>
|
||||
<field y="1pt" w="11pt" h="22pt" x="2pt">
|
||||
<assist><toolTip>Screen Reader</toolTip></assist>
|
||||
<ui>
|
||||
<textEdit multiLine="0"/>
|
||||
</ui>
|
||||
<value>
|
||||
<text maxChars="123"/>
|
||||
</value>
|
||||
</field>
|
||||
</pageArea>
|
||||
</pageSet>
|
||||
<subform name="first">
|
||||
<draw w="1pt" h="1pt"><value><text>foo</text></value></draw>
|
||||
</subform>
|
||||
</subform>
|
||||
</template>
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<xfa:data>
|
||||
</xfa:data>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
|
||||
const factory = new XFAFactory({ "xdp:xdp": xml });
|
||||
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
|
||||
const pages = await factory.getPages();
|
||||
const field = searchHtmlNode(pages, "name", "input");
|
||||
|
||||
expect(field.attributes["aria-label"]).toEqual("Screen Reader");
|
||||
});
|
||||
|
||||
it("should have an input or textarea", async () => {
|
||||
const xml = `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<template xmlns="http://www.xfa.org/schema/xfa-template/3.3">
|
||||
<subform name="root" mergeMode="matchTemplate">
|
||||
<pageSet>
|
||||
<pageArea>
|
||||
<contentArea x="123pt" w="456pt" h="789pt"/>
|
||||
<medium stock="default" short="456pt" long="789pt"/>
|
||||
<field y="1pt" w="11pt" h="22pt" x="2pt">
|
||||
<ui>
|
||||
<textEdit/>
|
||||
</ui>
|
||||
</field>
|
||||
<field y="1pt" w="11pt" h="22pt" x="2pt">
|
||||
<ui>
|
||||
<textEdit multiLine="1"/>
|
||||
</ui>
|
||||
</field>
|
||||
</pageArea>
|
||||
</pageSet>
|
||||
<subform name="first">
|
||||
<draw w="1pt" h="1pt"><value><text>foo</text></value></draw>
|
||||
</subform>
|
||||
</subform>
|
||||
</template>
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<xfa:data>
|
||||
</xfa:data>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
|
||||
const factory = new XFAFactory({ "xdp:xdp": xml });
|
||||
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
|
||||
const pages = await factory.getPages();
|
||||
const field1 = searchHtmlNode(pages, "name", "input");
|
||||
expect(field1).not.toEqual(null);
|
||||
|
||||
const field2 = searchHtmlNode(pages, "name", "textarea");
|
||||
expect(field2).not.toEqual(null);
|
||||
});
|
||||
});
|
||||
|
||||
it("should have an input or textarea", async () => {
|
||||
const xml = `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<template xmlns="http://www.xfa.org/schema/xfa-template/3.3">
|
||||
<subform name="root" mergeMode="matchTemplate">
|
||||
<pageSet>
|
||||
<pageArea>
|
||||
<contentArea x="123pt" w="456pt" h="789pt"/>
|
||||
<medium stock="default" short="456pt" long="789pt"/>
|
||||
<field y="1pt" w="11pt" h="22pt" x="2pt">
|
||||
<ui>
|
||||
<textEdit multiLine="1"/>
|
||||
</ui>
|
||||
</field>
|
||||
</pageArea>
|
||||
</pageSet>
|
||||
<subform name="first">
|
||||
<field y="1pt" w="11pt" h="22pt" x="2pt" name="hello">
|
||||
<ui>
|
||||
<textEdit/>
|
||||
</ui>
|
||||
<value>
|
||||
<integer/>
|
||||
</value>
|
||||
</field>
|
||||
</subform>
|
||||
</subform>
|
||||
</template>
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<xfa:data>
|
||||
<toto>
|
||||
<first>
|
||||
<hello>123
|
||||
</hello>
|
||||
</first>
|
||||
</toto>
|
||||
</xfa:data>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
|
||||
const factory = new XFAFactory({ "xdp:xdp": xml });
|
||||
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
|
||||
const pages = await factory.getPages();
|
||||
const field1 = searchHtmlNode(pages, "name", "input");
|
||||
expect(field1).not.toEqual(null);
|
||||
expect(field1.attributes.value).toEqual("123");
|
||||
});
|
||||
|
||||
it("should parse URLs correctly", async () => {
|
||||
function getXml(href) {
|
||||
return `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<template xmlns="http://www.xfa.org/schema/xfa-template/3.3">
|
||||
<subform name="root" mergeMode="matchTemplate">
|
||||
<pageSet>
|
||||
<pageArea>
|
||||
<contentArea x="0pt" w="456pt" h="789pt"/>
|
||||
<medium stock="default" short="456pt" long="789pt"/>
|
||||
<draw name="url" y="5.928mm" x="128.388mm" w="71.237mm" h="9.528mm">
|
||||
<value>
|
||||
<exData contentType="text/html">
|
||||
<body xmlns="http://www.w3.org/1999/xhtml">
|
||||
<a href="${href}">${href}</a>
|
||||
</body>
|
||||
</exData>
|
||||
</value>
|
||||
</draw>
|
||||
</pageArea>
|
||||
</pageSet>
|
||||
</subform>
|
||||
</template>
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<xfa:data>
|
||||
</xfa:data>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
|
||||
}
|
||||
let factory, pages, a;
|
||||
|
||||
// A valid, and complete, URL.
|
||||
factory = new XFAFactory({ "xdp:xdp": getXml("https://www.example.com/") });
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
pages = await factory.getPages();
|
||||
a = searchHtmlNode(pages, "name", "a");
|
||||
expect(a.value).toEqual("https://www.example.com/");
|
||||
expect(a.attributes.href).toEqual("https://www.example.com/");
|
||||
|
||||
// A valid, but incomplete, URL.
|
||||
factory = new XFAFactory({ "xdp:xdp": getXml("www.example.com/") });
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
pages = await factory.getPages();
|
||||
a = searchHtmlNode(pages, "name", "a");
|
||||
expect(a.value).toEqual("www.example.com/");
|
||||
expect(a.attributes.href).toEqual("http://www.example.com/");
|
||||
|
||||
// A valid email-address.
|
||||
factory = new XFAFactory({ "xdp:xdp": getXml("mailto:test@example.com") });
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
pages = await factory.getPages();
|
||||
a = searchHtmlNode(pages, "name", "a");
|
||||
expect(a.value).toEqual("mailto:test@example.com");
|
||||
expect(a.attributes.href).toEqual("mailto:test@example.com");
|
||||
|
||||
// Not a valid URL.
|
||||
factory = new XFAFactory({ "xdp:xdp": getXml("qwerty/") });
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
pages = await factory.getPages();
|
||||
a = searchHtmlNode(pages, "name", "a");
|
||||
expect(a.value).toEqual("qwerty/");
|
||||
expect(a.attributes.href).toEqual("");
|
||||
});
|
||||
|
||||
it("should replace button with an URL by a link", async () => {
|
||||
const xml = `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<template xmlns="http://www.xfa.org/schema/xfa-template/3.3">
|
||||
<subform name="root" mergeMode="matchTemplate">
|
||||
<pageSet>
|
||||
<pageArea>
|
||||
<contentArea x="123pt" w="456pt" h="789pt"/>
|
||||
<medium stock="default" short="456pt" long="789pt"/>
|
||||
</pageArea>
|
||||
</pageSet>
|
||||
<subform name="first">
|
||||
<field y="1pt" w="11pt" h="22pt" x="2pt">
|
||||
<ui>
|
||||
<button/>
|
||||
</ui>
|
||||
<event activity="click" name="event__click">
|
||||
<script contentType="application/x-javascript">
|
||||
app.launchURL("https://github.com/mozilla/pdf.js", true);
|
||||
</script>
|
||||
</event>
|
||||
</field>
|
||||
<field y="1pt" w="11pt" h="22pt" x="2pt">
|
||||
<ui>
|
||||
<button/>
|
||||
</ui>
|
||||
<event activity="click" name="event__click">
|
||||
<script contentType="application/x-javascript">
|
||||
xfa.host.gotoURL("https://github.com/allizom/pdf.js");
|
||||
</script>
|
||||
</event>
|
||||
</field>
|
||||
</subform>
|
||||
</subform>
|
||||
</template>
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<xfa:data>
|
||||
</xfa:data>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
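// The two button fields carry click scripts calling app.launchURL(..., true) and xfa.host.gotoURL(...); both are expected to be rendered as links, with only the first one opening a new window.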
|
||||
const factory = new XFAFactory({ "xdp:xdp": xml });
|
||||
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
|
||||
const pages = await factory.getPages();
|
||||
let a = searchHtmlNode(pages, "name", "a");
|
||||
expect(a.attributes.href).toEqual("https://github.com/mozilla/pdf.js");
|
||||
expect(a.attributes.newWindow).toEqual(true);
|
||||
|
||||
a = searchHtmlNode(pages, "name", "a", false, [1]);
|
||||
expect(a.attributes.href).toEqual("https://github.com/allizom/pdf.js");
|
||||
expect(a.attributes.newWindow).toEqual(false);
|
||||
});
|
||||
|
||||
it("should take the absolute value of the font size", async () => {
|
||||
const xml = `
|
||||
<?xml version="1.0"?>
|
||||
<xdp:xdp xmlns:xdp="http://ns.adobe.com/xdp/">
|
||||
<template xmlns="http://www.xfa.org/schema/xfa-template/3.3">
|
||||
<subform name="root" mergeMode="matchTemplate">
|
||||
<pageSet>
|
||||
<pageArea>
|
||||
<contentArea x="0pt" w="456pt" h="789pt"/>
|
||||
<draw y="1pt" w="11pt" h="22pt" x="2pt">
|
||||
<value>
|
||||
<text>
|
||||
<body xmlns="http://www.w3.org/1999/xhtml">
|
||||
<p style="foo: bar; text-indent:0.5in; line-height:11px;font-size: -14.0pt; bar:foo;tab-stop: left 0.5in;">
|
||||
The first line of this paragraph is indented a half-inch.<br/>
|
||||
Successive lines are not indented.<br/>
|
||||
This is the last line of the paragraph.<br/>
|
||||
</p>
|
||||
</body>
|
||||
</text>
|
||||
</value>
|
||||
</draw>
|
||||
</pageArea>
|
||||
</pageSet>
|
||||
</subform>
|
||||
</template>
|
||||
<xfa:datasets xmlns:xfa="http://www.xfa.org/schema/xfa-data/1.0/">
|
||||
<xfa:data>
|
||||
</xfa:data>
|
||||
</xfa:datasets>
|
||||
</xdp:xdp>
|
||||
`;
|
||||
const factory = new XFAFactory({ "xdp:xdp": xml });
|
||||
|
||||
expect(await factory.getNumPages()).toEqual(1);
|
||||
|
||||
const pages = await factory.getPages();
|
||||
const p = searchHtmlNode(pages, "name", "p");
|
||||
expect(p.attributes.style.fontSize).toEqual("13.86px");
|
||||
});
|
||||
});
|
||||
135
test/unit/xml_spec.js
Normal file
@@ -0,0 +1,135 @@
|
||||
/* Copyright 2020 Mozilla Foundation
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { SimpleXMLParser, XMLParserBase } from "../../src/core/xml_parser.js";
|
||||
import { parseXFAPath } from "../../src/core/core_utils.js";
|
||||
|
||||
describe("XML", function () {
|
||||
describe("searchNode", function () {
|
||||
it("should search a node with a given path in xml tree", function () {
|
||||
const xml = `
|
||||
<a>
|
||||
<b>
|
||||
<c a="123"/>
|
||||
<d/>
|
||||
<e>
|
||||
<f>
|
||||
<g a="321"/>
|
||||
</f>
|
||||
</e>
|
||||
<c a="456"/>
|
||||
<c a="789"/>
|
||||
<h/>
|
||||
<c a="101112"/>
|
||||
</b>
|
||||
<h>
|
||||
<i/>
|
||||
<j/>
|
||||
<k>
|
||||
<g a="654"/>
|
||||
</k>
|
||||
</h>
|
||||
<b>
|
||||
<g a="987"/>
|
||||
<h/>
|
||||
<g a="121110"/>
|
||||
</b>
|
||||
</a>`;
|
||||
const root = new SimpleXMLParser({ hasAttributes: true }).parseFromString(
|
||||
xml
|
||||
).documentElement;
|
||||
function getAttr(path) {
|
||||
return root.searchNode(parseXFAPath(path), 0).attributes[0].value;
|
||||
}
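// getAttr resolves a dotted path such as "b[1].g[0]" via parseXFAPath/searchNode and returns the value of the matched node's first attribute.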
|
||||
|
||||
expect(getAttr("b.g")).toEqual("321");
|
||||
expect(getAttr("e.f.g")).toEqual("321");
|
||||
expect(getAttr("e.g")).toEqual("321");
|
||||
expect(getAttr("g")).toEqual("321");
|
||||
expect(getAttr("h.g")).toEqual("654");
|
||||
expect(getAttr("b[0].g")).toEqual("321");
|
||||
expect(getAttr("b[1].g")).toEqual("987");
|
||||
expect(getAttr("b[1].g[0]")).toEqual("987");
|
||||
expect(getAttr("b[1].g[1]")).toEqual("121110");
|
||||
expect(getAttr("c")).toEqual("123");
|
||||
expect(getAttr("c[1]")).toEqual("456");
|
||||
expect(getAttr("c[2]")).toEqual("789");
|
||||
expect(getAttr("c[3]")).toEqual("101112");
|
||||
});
|
||||
|
||||
it("should dump a xml tree", function () {
|
||||
const xml = `
|
||||
<a>
|
||||
<b>
|
||||
<c a="123"/>
|
||||
<d>hello</d>
|
||||
<e>
|
||||
<f>
|
||||
<g a="321"/>
|
||||
</f>
|
||||
</e>
|
||||
<c a="456"/>
|
||||
<c a="789"/>
|
||||
<h/>
|
||||
<c a="101112"/>
|
||||
</b>
|
||||
<h>
|
||||
<i/>
|
||||
<j/>
|
||||
<k>
W😂rld
<g a="654"/>
|
||||
</k>
|
||||
</h>
|
||||
<b>
|
||||
<g a="987"/>
|
||||
<h/>
|
||||
<g a="121110"/>
|
||||
</b>
|
||||
</a>`;
|
||||
const root = new SimpleXMLParser({ hasAttributes: true }).parseFromString(
|
||||
xml
|
||||
).documentElement;
|
||||
const buffer = [];
|
||||
root.dump(buffer);
|
||||
|
||||
expect(buffer.join("").replaceAll(/\s+/g, "")).toEqual(
|
||||
xml.replaceAll(/\s+/g, "")
|
||||
);
|
||||
});
|
  });

  it("should parse processing instructions", function () {
    const xml = `
      <a>
        <?foo bar?>
        <?foo bar oof?>
        <?foo?>
      </a>`;
    const pi = [];

    class MyParser extends XMLParserBase {
      onPi(name, value) {
        pi.push([name, value]);
      }
    }

    new MyParser().parseXml(xml);

    expect(pi).toEqual([
      ["foo", "bar"],
      ["foo", "bar oof"],
      ["foo", ""],
    ]);
  });
});