mirror of
http://101.35.51.105:3000/congyu/Hakysidian.git
synced 2026-04-28 05:50:49 +08:00
first commit
This commit is contained in:
+12
@@ -0,0 +1,12 @@
|
|||||||
|
_cache/
|
||||||
|
dist-newstyle/
|
||||||
|
.DS_Store
|
||||||
|
cache/
|
||||||
|
packagedb/
|
||||||
|
build/
|
||||||
|
_site/
|
||||||
|
images/
|
||||||
|
notes/
|
||||||
|
# binaries
|
||||||
|
katex_cli
|
||||||
|
site
|
||||||
+130
@@ -0,0 +1,130 @@
|
|||||||
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
<style xmlns="http://purl.org/net/xbiblio/csl" class="in-text" version="1.0" demote-non-dropping-particle="sort-only" default-locale="en-US">
|
||||||
|
<!-- This style was edited with the Visual CSL Editor (http://editor.citationstyles.org/visualEditor/) -->
|
||||||
|
<info>
|
||||||
|
<title>Elsevier (numeric, with titles)</title>
|
||||||
|
<id>http://www.zotero.org/styles/elsevier-numeric-with-titles</id>
|
||||||
|
<link href="http://www.zotero.org/styles/elsevier-numeric-with-titles" rel="self"/>
|
||||||
|
<link href="http://www.zotero.org/styles/elsevier-without-titles" rel="template"/>
|
||||||
|
<link href="http://www.elsevier.com/journals/solid-state-electronics/0038-1101/guide-for-authors#68000" rel="documentation"/>
|
||||||
|
<author>
|
||||||
|
<name>Richard Karnesky</name>
|
||||||
|
<email>karnesky+zotero@gmail.com</email>
|
||||||
|
<uri>http://arc.nucapt.northwestern.edu/Richard_Karnesky</uri>
|
||||||
|
</author>
|
||||||
|
<contributor>
|
||||||
|
<name>Rintze Zelle</name>
|
||||||
|
<uri>http://twitter.com/rintzezelle</uri>
|
||||||
|
</contributor>
|
||||||
|
<category citation-format="numeric"/>
|
||||||
|
<category field="generic-base"/>
|
||||||
|
<summary>A style for many of Elsevier's journals that includes article titles in the reference list</summary>
|
||||||
|
<updated>2025-06-05T07:59:51+00:00</updated>
|
||||||
|
<rights license="http://creativecommons.org/licenses/by-sa/3.0/">This work is licensed under a Creative Commons Attribution-ShareAlike 3.0 License</rights>
|
||||||
|
</info>
|
||||||
|
<macro name="author">
|
||||||
|
<names variable="author">
|
||||||
|
<name initialize-with="." delimiter=", " delimiter-precedes-last="always"/>
|
||||||
|
<label form="short" prefix=", "/>
|
||||||
|
<substitute>
|
||||||
|
<names variable="editor"/>
|
||||||
|
<names variable="translator"/>
|
||||||
|
</substitute>
|
||||||
|
</names>
|
||||||
|
</macro>
|
||||||
|
<macro name="editor">
|
||||||
|
<names variable="editor">
|
||||||
|
<name initialize-with="." delimiter=", " delimiter-precedes-last="always"/>
|
||||||
|
<label form="short" prefix=" (" text-case="capitalize-first" suffix=".)" strip-periods="true"/>
|
||||||
|
</names>
|
||||||
|
</macro>
|
||||||
|
<macro name="year-date">
|
||||||
|
<choose>
|
||||||
|
<if variable="issued">
|
||||||
|
<date variable="issued">
|
||||||
|
<date-part name="year"/>
|
||||||
|
</date>
|
||||||
|
</if>
|
||||||
|
<else>
|
||||||
|
<text term="no date" form="short"/>
|
||||||
|
</else>
|
||||||
|
</choose>
|
||||||
|
</macro>
|
||||||
|
<macro name="publisher">
|
||||||
|
<text variable="publisher" suffix=", "/>
|
||||||
|
<text variable="publisher-place" suffix=", "/>
|
||||||
|
<text macro="year-date"/>
|
||||||
|
</macro>
|
||||||
|
<macro name="edition">
|
||||||
|
<choose>
|
||||||
|
<if is-numeric="edition">
|
||||||
|
<group delimiter=" ">
|
||||||
|
<number variable="edition" form="ordinal"/>
|
||||||
|
<text term="edition" form="short"/>
|
||||||
|
</group>
|
||||||
|
</if>
|
||||||
|
<else>
|
||||||
|
<text variable="edition"/>
|
||||||
|
</else>
|
||||||
|
</choose>
|
||||||
|
</macro>
|
||||||
|
<citation collapse="citation-number">
|
||||||
|
<sort>
|
||||||
|
<key variable="citation-number"/>
|
||||||
|
</sort>
|
||||||
|
<layout prefix="[" suffix="]" delimiter=",">
|
||||||
|
<text variable="citation-number"/>
|
||||||
|
</layout>
|
||||||
|
</citation>
|
||||||
|
<bibliography entry-spacing="0" second-field-align="flush" et-al-min="7" et-al-use-first="6">
|
||||||
|
<layout suffix=".">
|
||||||
|
<text variable="citation-number" prefix="[" suffix="]"/>
|
||||||
|
<text macro="author" prefix=" " suffix=", "/>
|
||||||
|
<choose>
|
||||||
|
<if type="bill book graphic legal_case legislation motion_picture report song" match="any">
|
||||||
|
<group delimiter=", ">
|
||||||
|
<text variable="title" font-style="normal" font-weight="normal"/>
|
||||||
|
<text macro="edition"/>
|
||||||
|
<text macro="publisher"/>
|
||||||
|
</group>
|
||||||
|
</if>
|
||||||
|
<else-if type="chapter paper-conference" match="any">
|
||||||
|
<text variable="title" suffix=", "/>
|
||||||
|
<text term="in" suffix=": "/>
|
||||||
|
<text variable="container-title" form="short" text-case="title" font-style="italic" suffix=", "/>
|
||||||
|
<text macro="edition" suffix=", "/>
|
||||||
|
<text macro="publisher"/>
|
||||||
|
<group delimiter=" ">
|
||||||
|
<label variable="page" form="short" prefix=": "/>
|
||||||
|
<text variable="page"/>
|
||||||
|
</group>
|
||||||
|
</else-if>
|
||||||
|
<else-if type="patent">
|
||||||
|
<group delimiter=", ">
|
||||||
|
<text variable="title"/>
|
||||||
|
<text variable="number"/>
|
||||||
|
<text macro="year-date"/>
|
||||||
|
</group>
|
||||||
|
</else-if>
|
||||||
|
<else-if type="thesis">
|
||||||
|
<group delimiter=", ">
|
||||||
|
<text variable="title"/>
|
||||||
|
<text variable="genre"/>
|
||||||
|
<text variable="publisher"/>
|
||||||
|
<text macro="year-date"/>
|
||||||
|
</group>
|
||||||
|
</else-if>
|
||||||
|
<else>
|
||||||
|
<group delimiter=" ">
|
||||||
|
<text variable="title" font-style="normal" font-weight="normal" suffix=","/>
|
||||||
|
<text variable="container-title" form="short" text-case="title" font-style="italic" suffix="."/>
|
||||||
|
<text variable="volume"/>
|
||||||
|
<text macro="year-date" prefix="(" suffix=")"/>
|
||||||
|
<text variable="page" form="short"/>
|
||||||
|
</group>
|
||||||
|
</else>
|
||||||
|
</choose>
|
||||||
|
<text variable="DOI" prefix=" "/>
|
||||||
|
</layout>
|
||||||
|
</bibliography>
|
||||||
|
</style>
|
||||||
@@ -0,0 +1,87 @@
|
|||||||
|
.theorem-environment {
|
||||||
|
font-style: italic;
|
||||||
|
margin-top: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theorem-header {
|
||||||
|
font-weight: bold;
|
||||||
|
font-style: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theorem-header .index:before {
|
||||||
|
content: ' ';
|
||||||
|
}
|
||||||
|
|
||||||
|
.theorem-header .name:before {
|
||||||
|
content: ' (';
|
||||||
|
}
|
||||||
|
|
||||||
|
.theorem-header .name:after {
|
||||||
|
content: ')';
|
||||||
|
}
|
||||||
|
|
||||||
|
.theorem-header:after {
|
||||||
|
content: '.\2002\2002';
|
||||||
|
}
|
||||||
|
|
||||||
|
.theorem-header+p {
|
||||||
|
display: inline;
|
||||||
|
}
|
||||||
|
|
||||||
|
.Proof .type {
|
||||||
|
font-style: italic;
|
||||||
|
font-weight: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
.Proof {
|
||||||
|
font-style: normal;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
|
||||||
|
.Proof:after {
|
||||||
|
content: '∎';
|
||||||
|
position: absolute;
|
||||||
|
right: 0px;
|
||||||
|
bottom: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.postindex {
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.postindex cite {
|
||||||
|
font-style: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.postindex td.right {
|
||||||
|
text-align: right;
|
||||||
|
width: 11ex;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-section-number {
|
||||||
|
margin-right: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-section-number:after {
|
||||||
|
content: '.';
|
||||||
|
}
|
||||||
|
|
||||||
|
.csl-entry {
|
||||||
|
display: table;
|
||||||
|
width: 100%;
|
||||||
|
table-layout: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.csl-left-margin {
|
||||||
|
display: table-cell;
|
||||||
|
padding-right: 0.5em;
|
||||||
|
white-space: nowrap;
|
||||||
|
width: 1px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.csl-right-inline {
|
||||||
|
display: table-cell;
|
||||||
|
}
|
||||||
|
.csl-right-inline a{
|
||||||
|
word-break: break-all;
|
||||||
|
}
|
||||||
+458
@@ -0,0 +1,458 @@
|
|||||||
|
:root {
|
||||||
|
--color-text: black;
|
||||||
|
--color-tag1: gray;
|
||||||
|
--color-tag2: darkolivegreen;
|
||||||
|
--color-bg: white;
|
||||||
|
--color-link: #337ab7;
|
||||||
|
--color-linkhbg: #e6f0ff;
|
||||||
|
--color-linkh: #002266;
|
||||||
|
--color-bq: olivedrab;
|
||||||
|
--color-notice: #fb4f4f;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (prefers-color-scheme: dark) {
|
||||||
|
:root {
|
||||||
|
--color-text: white;
|
||||||
|
--color-bg: black;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
html {
|
||||||
|
scrollbar-gutter: stable;
|
||||||
|
font-size: 14pt;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: 'Lato', -apple-system, BlinkMacSystemFont, 'PingFang SC', 'Microsoft YaHei', sans-serif;
|
||||||
|
font-optical-sizing: auto;
|
||||||
|
font-weight: 400;
|
||||||
|
font-style: normal;
|
||||||
|
font-size: 1rem;
|
||||||
|
line-height: 125%;
|
||||||
|
color: var(--color-text);
|
||||||
|
background-color: var(--color-bg);
|
||||||
|
text-rendering: optimizeLegibility;
|
||||||
|
}
|
||||||
|
body.lang-zh {
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
body a {
|
||||||
|
color: var(--color-link);
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.text-space a:hover {
|
||||||
|
background-color: var(--color-linkhbg);
|
||||||
|
color: var(--color-linkh);
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
details {
|
||||||
|
background-color: var(--color-linkhbg);
|
||||||
|
}
|
||||||
|
summary:hover {
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
/*mathML*/
|
||||||
|
.htmlmathparagraph, mtext,math {
|
||||||
|
font-family: Lete Sans Math;
|
||||||
|
}
|
||||||
|
.math-container,
|
||||||
|
#math-container {
|
||||||
|
display: block;
|
||||||
|
overflow-x: auto;
|
||||||
|
overflow-y: hidden;
|
||||||
|
padding: .5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.math-container.math-container-tagged {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: minmax(0, 1fr) auto minmax(0, 1fr);
|
||||||
|
align-items: center;
|
||||||
|
column-gap: 1rem;
|
||||||
|
overflow: visible;
|
||||||
|
padding: .5em 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.math-container.math-container-tagged .math-tag-spacer {
|
||||||
|
min-width: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.math-container.math-container-tagged .math-equation {
|
||||||
|
min-width: 0;
|
||||||
|
overflow-x: auto;
|
||||||
|
overflow-y: hidden;
|
||||||
|
padding: .5em 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.math-container.math-container-tagged .math-tag {
|
||||||
|
justify-self: end;
|
||||||
|
white-space: nowrap;
|
||||||
|
font-family: "IosevkaC", sans-serif;
|
||||||
|
}
|
||||||
|
|
||||||
|
.math-container.math-container-tagged .math-tag math {
|
||||||
|
display: inline;
|
||||||
|
}
|
||||||
|
|
||||||
|
.text-space .langtag {
|
||||||
|
color: var(--color-tag1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.sc {
|
||||||
|
font-variant-caps: small-caps;
|
||||||
|
}
|
||||||
|
|
||||||
|
p {
|
||||||
|
hyphens: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.url {
|
||||||
|
word-break: break-all;
|
||||||
|
}
|
||||||
|
|
||||||
|
header {
|
||||||
|
font-weight: 400;
|
||||||
|
font-family: "IosevkaC", sans-serif;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* top bar*/
|
||||||
|
.navbar {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
align-items: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.navright a {
|
||||||
|
margin: 0 0 0 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Links inside the navbar */
|
||||||
|
.navbar a {
|
||||||
|
text-decoration: none;
|
||||||
|
color: var(--color-text);
|
||||||
|
}
|
||||||
|
|
||||||
|
.navbar a:visited {
|
||||||
|
color: var(--color-text);
|
||||||
|
}
|
||||||
|
|
||||||
|
nav {
|
||||||
|
text-align: right;
|
||||||
|
border-bottom: solid 1px var(--color-text);
|
||||||
|
}
|
||||||
|
|
||||||
|
nav a {
|
||||||
|
font-size: 1.2rem;
|
||||||
|
/*margin-left: 0.5em;*/
|
||||||
|
display: inline-block;
|
||||||
|
vertical-align: middle;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.uri {
|
||||||
|
word-wrap: break-word;
|
||||||
|
/* Legacy support */
|
||||||
|
overflow-wrap: break-word;
|
||||||
|
/* Modern property */
|
||||||
|
word-break: break-all;
|
||||||
|
/* Break long words if necessary */
|
||||||
|
white-space: normal;
|
||||||
|
/* Allow wrapping */
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
footer {
|
||||||
|
color: var(--color-text);
|
||||||
|
font-size: 0.8rem;
|
||||||
|
margin-top: 2em;
|
||||||
|
text-align: right;
|
||||||
|
padding-right: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1,
|
||||||
|
h2,
|
||||||
|
h3,
|
||||||
|
h4,
|
||||||
|
h5,
|
||||||
|
h6 {
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pagetitle {
|
||||||
|
font-size: 2rem;
|
||||||
|
font-weight: normal;
|
||||||
|
font-style: normal;
|
||||||
|
text-align: left;
|
||||||
|
line-height: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1 {
|
||||||
|
margin-top: 1em;
|
||||||
|
font-size: 1.44rem;
|
||||||
|
font-weight: bold;
|
||||||
|
font-style: normal;
|
||||||
|
text-align: left;
|
||||||
|
line-height: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
h2 {
|
||||||
|
margin-top: 1em;
|
||||||
|
font-size: 1.2rem;
|
||||||
|
font-weight: bold;
|
||||||
|
font-style: normal
|
||||||
|
}
|
||||||
|
|
||||||
|
h3 {
|
||||||
|
margin-top: 1em;
|
||||||
|
font-size: 1rem;
|
||||||
|
font-weight: bold;
|
||||||
|
font-style: normal
|
||||||
|
}
|
||||||
|
|
||||||
|
article .header {
|
||||||
|
font-size: 1rem;
|
||||||
|
font-style: normal;
|
||||||
|
color: var(--color-tag1);
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
.info {
|
||||||
|
color: var(--color-tag2);
|
||||||
|
font-size: 1rem;
|
||||||
|
font-style: normal;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
.info a {
|
||||||
|
color: var(--color-tag2);
|
||||||
|
font-size: 1rem;
|
||||||
|
font-style: normal;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
.info a:visited {
|
||||||
|
color: var(--color-tag2);
|
||||||
|
}
|
||||||
|
|
||||||
|
section.body {
|
||||||
|
margin-top: 2rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.ascii-art {
|
||||||
|
font-family: monospace;
|
||||||
|
line-height: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
blockquote {
|
||||||
|
margin: 1rem 0;
|
||||||
|
padding: 0 0 0 1.5em;
|
||||||
|
border-left: 3px solid var(--color-bq);
|
||||||
|
}
|
||||||
|
|
||||||
|
blockquote p {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol {
|
||||||
|
padding-left: 2em;
|
||||||
|
}
|
||||||
|
ul {
|
||||||
|
list-style-type: square;
|
||||||
|
padding-left: 2em;
|
||||||
|
}
|
||||||
|
li {
|
||||||
|
margin-bottom: 0.15em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table,
|
||||||
|
th,
|
||||||
|
td {
|
||||||
|
border: 1px solid darkolivegreen;
|
||||||
|
border-collapse: collapse;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
figure {
|
||||||
|
display: flex;
|
||||||
|
flex-flow: column;
|
||||||
|
padding: 5px;
|
||||||
|
margin: auto;
|
||||||
|
max-width: 80%;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
figcaption {
|
||||||
|
/* font: italic smaller sans-serif; */
|
||||||
|
padding: 3px;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.caption {
|
||||||
|
display: none
|
||||||
|
}
|
||||||
|
|
||||||
|
.centerimg img {
|
||||||
|
margin: 0 auto 0 auto;
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
div.highlight,
|
||||||
|
pre code {
|
||||||
|
margin: auto;
|
||||||
|
padding: 10px;
|
||||||
|
overflow: auto;
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
code {
|
||||||
|
font-family: "IosevkaC", monospace;
|
||||||
|
margin: 0 auto;
|
||||||
|
display: inline-block;
|
||||||
|
padding: 0px 2px;
|
||||||
|
border-radius: 2px;
|
||||||
|
font-variant-ligatures: none;
|
||||||
|
font-kerning: none;
|
||||||
|
text-rendering: optimizeSpeed;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
.draft-notice {
|
||||||
|
color: var(--color-notice);
|
||||||
|
margin: 1em auto;
|
||||||
|
text-align: center
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
.subtitle {
|
||||||
|
text-align: left;
|
||||||
|
font-size: 1.2rem;
|
||||||
|
margin-top: 0
|
||||||
|
}
|
||||||
|
.gallery {
|
||||||
|
margin-top: 2em;
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
|
||||||
|
gap: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.gallery img {
|
||||||
|
width: 100%;
|
||||||
|
max-width: 320px;
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* phones -- no sidebar no sidenotes*/
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
body {
|
||||||
|
/* width: 90%; */
|
||||||
|
margin: auto;
|
||||||
|
padding: 0 5%;
|
||||||
|
text-align: left;
|
||||||
|
max-width: 876px;
|
||||||
|
}
|
||||||
|
mjx-container[display="true"]
|
||||||
|
/*, .katex-display */ {
|
||||||
|
overflow-x: auto;
|
||||||
|
overflow-y: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* .katex-display>.katex>.katex-html>.tag {
|
||||||
|
display: inline-block;
|
||||||
|
position: relative;
|
||||||
|
padding-left: 10pt;
|
||||||
|
} */
|
||||||
|
}
|
||||||
|
|
||||||
|
.toc {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* sidebar. no sidenotes */
|
||||||
|
@media (min-width: 769px) {
|
||||||
|
body {
|
||||||
|
max-width: 1350px;
|
||||||
|
display: -webkit-flex;
|
||||||
|
-webkit-flex-flow: row wrap;
|
||||||
|
display: -ms-flexbox;
|
||||||
|
-ms-flex-flow: row wrap;
|
||||||
|
flex-flow: row wrap;
|
||||||
|
width: 95%;
|
||||||
|
padding-right: 5%;
|
||||||
|
margin: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.toc {
|
||||||
|
margin-top: 5rem;
|
||||||
|
margin-left: 0;
|
||||||
|
margin-right: 0;
|
||||||
|
width: 33%;
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
div#contents ul,
|
||||||
|
div#contents-big ul {
|
||||||
|
margin-top: 0.5em;
|
||||||
|
margin-bottom: 0.5em;
|
||||||
|
padding-left: 1em;
|
||||||
|
line-height: 1.2;
|
||||||
|
list-style-type: decimal;
|
||||||
|
margin-left: 0
|
||||||
|
}
|
||||||
|
|
||||||
|
div#contents-big ul ul {
|
||||||
|
list-style-type: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div#contents-big li+li {
|
||||||
|
margin-top: 0.5em
|
||||||
|
}
|
||||||
|
|
||||||
|
div#contents-big {
|
||||||
|
font-size: 80%;
|
||||||
|
padding-top: 0;
|
||||||
|
padding-left: 1rem;
|
||||||
|
text-align: left;
|
||||||
|
max-width: 60%;
|
||||||
|
clear: both;
|
||||||
|
margin-right: 4em;
|
||||||
|
position: sticky;
|
||||||
|
top: 5rem;
|
||||||
|
left: 100%
|
||||||
|
}
|
||||||
|
|
||||||
|
div#contents-big .mini-header {
|
||||||
|
font-weight: bold;
|
||||||
|
margin: 0;
|
||||||
|
font-variant: small-caps;
|
||||||
|
}
|
||||||
|
|
||||||
|
.text-space {
|
||||||
|
display: inline-block;
|
||||||
|
width: 66%;
|
||||||
|
max-width: 875px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/* sidebar+sidenotes */
|
||||||
|
@media (min-width: 1200px) {
|
||||||
|
body {
|
||||||
|
width: 75%;
|
||||||
|
padding-right: 25%;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media print {
|
||||||
|
|
||||||
|
.no-print,
|
||||||
|
.no-print * {
|
||||||
|
display: none !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
margin: auto;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,52 @@
|
|||||||
|
|
||||||
|
/* fonts */
|
||||||
|
|
||||||
|
@font-face {
|
||||||
|
font-family: "Lato";
|
||||||
|
src: url("/fonts/Lato-Regular.woff2") format("woff2");
|
||||||
|
font-weight: normal;
|
||||||
|
font-style: normal;
|
||||||
|
}
|
||||||
|
@font-face {
|
||||||
|
font-family: "Lato";
|
||||||
|
src: url("/fonts/Lato-Bold.woff2") format("woff2");
|
||||||
|
font-weight: bold;
|
||||||
|
font-style: normal;
|
||||||
|
}
|
||||||
|
@font-face {
|
||||||
|
font-family: "Lato";
|
||||||
|
src: url("/fonts/Lato-Italic.woff2") format("woff2");
|
||||||
|
font-weight: normal;
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
@font-face {
|
||||||
|
font-family: "Lato";
|
||||||
|
src: url("/fonts/Lato-BoldItalic.woff2") format("woff2");
|
||||||
|
font-weight: bold;
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
@font-face {
|
||||||
|
font-family: "Lete Sans Math";
|
||||||
|
src: url("/fonts/LeteSansMath.woff2") format("woff2");
|
||||||
|
font-weight: normal;
|
||||||
|
font-style: normal;
|
||||||
|
}
|
||||||
|
@font-face {
|
||||||
|
font-family: "Lete Sans Math";
|
||||||
|
src: url("/fonts/LeteSansMath-Bold.woff2") format("woff2");
|
||||||
|
font-weight: bold;
|
||||||
|
font-style: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
@font-face {
|
||||||
|
font-family: "IosevkaC";
|
||||||
|
src: url("/fonts/IosevkaCustom-Regular.woff2") format("woff2");
|
||||||
|
font-weight: normal;
|
||||||
|
font-style: normal;
|
||||||
|
}
|
||||||
|
@font-face {
|
||||||
|
font-family: "IosevkaC";
|
||||||
|
src: url("/fonts/IosevkaCustom-Bold.woff2") format("woff2");
|
||||||
|
font-weight: bold;
|
||||||
|
font-style: normal;
|
||||||
|
}
|
||||||
@@ -0,0 +1,35 @@
|
|||||||
|
|
||||||
|
code.sourceCode
|
||||||
|
{
|
||||||
|
background: inherit
|
||||||
|
}
|
||||||
|
pre > code.sourceCode > span > a:first-child::before { text-decoration: underline; }
|
||||||
|
code span.al { color: #CB4B16; font-weight: bold; } /* Alert */
|
||||||
|
code span.an { color: #60a0b0; font-weight: bold; font-style: italic; } /* Annotation */
|
||||||
|
code span.at { color: #7d9029; } /* Attribute */
|
||||||
|
code span.bn { color: #D33682; } /* BaseN */
|
||||||
|
code span.bu { } /* BuiltIn */
|
||||||
|
code span.cf { color: #5F8700; font-weight: bold; } /* ControlFlow */
|
||||||
|
code span.ch { color: #16801a; } /* Char */
|
||||||
|
code span.cn { color: #880000; } /* Constant */
|
||||||
|
code span.co { color: #93A1A1; font-style: italic; } /* Comment */
|
||||||
|
code span.cv { color: #60a0b0; font-weight: bold; font-style: italic; } /* CommentVar */
|
||||||
|
code span.do { color: #ba2121; font-style: italic; } /* Documentation */
|
||||||
|
code span.dt { background-color: #f8edff; } /* DataType */
|
||||||
|
code span.dv { color: #D33682; } /* DecVal */
|
||||||
|
code span.er { color: #D30102; font-weight: bold; } /* Error */
|
||||||
|
code span.ex { } /* Extension */
|
||||||
|
code span.fl { color: #D33682; } /* Float */
|
||||||
|
code span.fu { } /* Function */
|
||||||
|
code span.im { color: #D70000} /* Import */
|
||||||
|
code span.in { color: #60a0b0; font-weight: bold; font-style: italic; } /* Information */
|
||||||
|
code span.kw { font-weight: bold; } /* Keyword */
|
||||||
|
code span.op { font-weight: bold; } /* Operator */
|
||||||
|
code span.ot { font-weight: bold; } /* Other */
|
||||||
|
code span.pp { color: #bc7a00; } /* Preprocessor */
|
||||||
|
code span.sc { color: #4070a0; } /* SpecialChar */
|
||||||
|
code span.ss { color: #bb6688; } /* SpecialString */
|
||||||
|
code span.st { color: #16801a; } /* String */
|
||||||
|
code span.va { color: #19177c; } /* Variable */
|
||||||
|
code span.vs { color: #4070a0; } /* VerbatimString */
|
||||||
|
code span.wa { color: #60a0b0; font-weight: bold; font-style: italic; } /* Warning */
|
||||||
@@ -0,0 +1,179 @@
|
|||||||
|
/*
|
||||||
|
This file is copied from
|
||||||
|
https://github.com/slotThe/slotThe.github.io/blob/main/css/sidenotes.css
|
||||||
|
with minor modifications made by Yu Cong.
|
||||||
|
|
||||||
|
The original author is Tony Zorman.
|
||||||
|
|
||||||
|
Extracted from:
|
||||||
|
|
||||||
|
https://github.com/edwardtufte/tufte-css
|
||||||
|
|
||||||
|
and modified to fit my website's theme.
|
||||||
|
*/
|
||||||
|
|
||||||
|
body {
|
||||||
|
counter-reset: sidenote-counter;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidenote,
|
||||||
|
.marginnote,
|
||||||
|
.marginnote-left {
|
||||||
|
float: right;
|
||||||
|
clear: right;
|
||||||
|
margin-right: -42%;
|
||||||
|
width: 36%;
|
||||||
|
margin-top: 0.3rem;
|
||||||
|
margin-bottom: 0;
|
||||||
|
font-size: 0.8em;
|
||||||
|
line-height: 1.2;
|
||||||
|
vertical-align: baseline;
|
||||||
|
position: relative;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
@media (max-width: 1200px) {
|
||||||
|
.sidenote,
|
||||||
|
.marginnote,
|
||||||
|
.marginnote-left {
|
||||||
|
margin-right: -40%;
|
||||||
|
width: 33%;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.marginnote-left {
|
||||||
|
float: left;
|
||||||
|
clear: left;
|
||||||
|
margin-left: -32%;
|
||||||
|
width: 25%;
|
||||||
|
position: relative;
|
||||||
|
text-align: right;
|
||||||
|
}
|
||||||
|
/* The first condition is for the case of a left-aligned layout (on a
|
||||||
|
smaller screen), and the second condition for a more centered layout
|
||||||
|
on a larger screen. It's a bit awkward, sadly :/ */
|
||||||
|
@media (max-width: 1349px) or ((min-width: 1367px) and (max-width: 1620px)) {
|
||||||
|
.marginnote-left {
|
||||||
|
margin-left: -33%;
|
||||||
|
width: 30%;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidenote code {
|
||||||
|
font-size: 0.94em;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* For some reason, although only `overflow-x` is set in `default.css`,
|
||||||
|
block code in side and marginnotes gets a vertical (!) scrollbar no
|
||||||
|
matter what; disable that.
|
||||||
|
*/
|
||||||
|
div .marginnote pre,
|
||||||
|
div .sidenote pre {
|
||||||
|
overflow-y: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidenote-number {
|
||||||
|
counter-increment: sidenote-counter;
|
||||||
|
color: var(--color-link);
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidenote-number:after,
|
||||||
|
.sidenote:before {
|
||||||
|
position: relative;
|
||||||
|
vertical-align: baseline;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidenote-number:after {
|
||||||
|
content: counter(sidenote-counter);
|
||||||
|
font-size: 0.8rem;
|
||||||
|
top: -0.5rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Properly position siednote number and adjust position of sidenote
|
||||||
|
paragraphs:
|
||||||
|
https://github.com/edwardtufte/tufte-css/issues/93#issuecomment-670695382
|
||||||
|
*/
|
||||||
|
.sidenote::before {
|
||||||
|
content: counter(sidenote-counter) " ";
|
||||||
|
font-size: 0.8rem;
|
||||||
|
top: -0.55rem;
|
||||||
|
position: absolute;
|
||||||
|
right: calc(100% + 0.5em);
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidenote p {
|
||||||
|
margin: 1em 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidenote p:first-child {
|
||||||
|
margin-top: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidenote p:last-child {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* */
|
||||||
|
|
||||||
|
input.margin-toggle {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
label.sidenote-number {
|
||||||
|
display: inline-block;
|
||||||
|
max-height: 2rem; /* should be less than or equal to paragraph line-height */
|
||||||
|
}
|
||||||
|
|
||||||
|
label.margin-toggle:not(.sidenote-number) {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.iframe-wrapper {
|
||||||
|
position: relative;
|
||||||
|
padding-bottom: 56.25%; /* 16:9 */
|
||||||
|
padding-top: 25px;
|
||||||
|
height: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.iframe-wrapper iframe {
|
||||||
|
position: absolute;
|
||||||
|
top: 0;
|
||||||
|
left: 0;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (max-width: 1200px) {
|
||||||
|
label.margin-toggle:not(.sidenote-number) {
|
||||||
|
display: inline;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidenote,
|
||||||
|
.marginnote,
|
||||||
|
.marginnote-left {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Linkify sidenotes iff they are clickable */
|
||||||
|
.margin-toggle,
|
||||||
|
.sidenote-number:after {
|
||||||
|
color: var(--color-link);
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.margin-toggle:checked + .sidenote,
|
||||||
|
.margin-toggle:checked + .marginnote,
|
||||||
|
.margin-toggle:checked + .marginnote-left {
|
||||||
|
display: block;
|
||||||
|
float: left;
|
||||||
|
left: 1rem;
|
||||||
|
clear: both;
|
||||||
|
width: 95%;
|
||||||
|
margin: 1rem 2.5%;
|
||||||
|
position: relative;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
label {
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
}
|
||||||
BIN
Binary file not shown.
|
After Width: | Height: | Size: 2.6 KiB |
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,37 @@
|
|||||||
|
name: hakyll-blog
|
||||||
|
version: 0.1.0.0
|
||||||
|
build-type: Simple
|
||||||
|
cabal-version: >= 1.10
|
||||||
|
|
||||||
|
|
||||||
|
executable site
|
||||||
|
hs-source-dirs: src
|
||||||
|
main-is: site.hs
|
||||||
|
other-modules: ChaoDoc, SideNoteHTML, Pangu
|
||||||
|
build-depends: base >= 4.18
|
||||||
|
, hakyll >= 4.15
|
||||||
|
, mtl >= 2.2.2
|
||||||
|
, pandoc
|
||||||
|
, pandoc-types >= 1.22.2.1
|
||||||
|
, pandoc-sidenote
|
||||||
|
, tagsoup
|
||||||
|
, text
|
||||||
|
, containers
|
||||||
|
-- , process
|
||||||
|
-- , regex-compat
|
||||||
|
, array
|
||||||
|
, filepath
|
||||||
|
-- , ghc-syntax-highlighter
|
||||||
|
-- , blaze-html >= 0.9
|
||||||
|
, megaparsec
|
||||||
|
, replace-megaparsec
|
||||||
|
ghc-options: -Weverything
|
||||||
|
-Wno-implicit-prelude
|
||||||
|
-Wno-missing-import-lists
|
||||||
|
-Wno-unused-packages
|
||||||
|
-Wno-missing-safe-haskell-mode
|
||||||
|
-Wno-all-missed-specialisations
|
||||||
|
-Wno-unsafe
|
||||||
|
-Wno-prepositive-qualified-module
|
||||||
|
-O2 -threaded -rtsopts -with-rtsopts=-N
|
||||||
|
default-language: Haskell2010
|
||||||
@@ -0,0 +1,26 @@
|
|||||||
|
|
||||||
|
COMMANDS := build watch rebuild clean
|
||||||
|
.PHONY: $(COMMANDS), publish
|
||||||
|
|
||||||
|
# Set the default goal, so running 'make' without arguments will run 'make build'.
|
||||||
|
.DEFAULT_GOAL := build
|
||||||
|
|
||||||
|
|
||||||
|
# ---
|
||||||
|
$(COMMANDS): site
|
||||||
|
@echo "Running command: ./site $@"
|
||||||
|
-@./site $@
|
||||||
|
|
||||||
|
|
||||||
|
# --- Rules ---
|
||||||
|
# using relative symlinks should be fine since everything only works at ./
|
||||||
|
|
||||||
|
|
||||||
|
site: src/site.hs src/ChaoDoc.hs
|
||||||
|
cabal build
|
||||||
|
ln -sf "$(shell cabal list-bin exe:site)" site
|
||||||
|
|
||||||
|
# move from katex to mathjax
|
||||||
|
# katex_cli:
|
||||||
|
# cd katex_rust_fork && cargo build --release
|
||||||
|
# ln -sf ./katex_rust_fork/target/release/katex_cli katex_cli
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
|
||||||
|
\newcommand{\F}{\mathcal{F}}
|
||||||
|
\newcommand{\B}{\mathcal{B}}
|
||||||
|
\newcommand{\I}{\mathcal{I}}
|
||||||
|
\newcommand{\C}{\mathcal{C}}
|
||||||
|
\newcommand{\Hit}{\text{Hit}}
|
||||||
|
\newcommand{\HSGap}{\text{HSGap}}
|
||||||
|
\newcommand{\IntGap}{\text{IntGap}}
|
||||||
|
\newcommand{\OPTLP}{\text{OPTLP}}
|
||||||
|
\newcommand{\OPT}{\text{OPT}}
|
||||||
|
\newcommand{\DecRank}{\text{Rank-}k\text{-Reduction}}
|
||||||
|
\newcommand{\flats}{\operatorname{Flats}}
|
||||||
|
\newcommand{\cl}{\operatorname{cl}}
|
||||||
|
\newcommand{\gap}{\operatorname{gap}}
|
||||||
|
\newcommand{\igap}{\operatorname{igap}}
|
||||||
|
\newcommand{\cogirthratio}{\rho}
|
||||||
|
\newcommand{\cogirthratiowt}{\cogirthratio^{\mathrm{wt}}}
|
||||||
|
\newcommand{\strength}{\operatorname{strength}}
|
||||||
|
\newcommand{\argmin}{\operatorname{arg\,min}}
|
||||||
|
\newcommand{\argmax}{\operatorname{arg\,max}}
|
||||||
|
\newcommand{\del}{\setminus}
|
||||||
|
\newcommand{\con}{/}
|
||||||
|
\newcommand{\minor}{\mathrlap{/}{\setminus}}
|
||||||
|
\newcommand{\floor}[1]{\left\lfloor #1 \right\rfloor}
|
||||||
|
\newcommand{\ceil}[1]{\left\lceil #1 \right\rceil}
|
||||||
|
\newcommand{\set}[1]{\left\{ #1 \right\}}
|
||||||
|
\newcommand{\norm}[1]{\left\| #1 \right\|}
|
||||||
|
\newcommand{\rm}[1]{\operatorname{#1}}
|
||||||
|
\newcommand{\mex}{\operatorname{mex}}
|
||||||
|
\newcommand{\lcm}{\operatorname{lcm}}
|
||||||
|
\newcommand{\dist}{\operatorname{dist}}
|
||||||
|
\newcommand{\poly}{\operatorname{poly}}
|
||||||
|
\newcommand{\polylog}{\operatorname{polylog}}
|
||||||
|
\newcommand{\span}{\operatorname{span}}
|
||||||
@@ -0,0 +1,95 @@
|
|||||||
|
'''
|
||||||
|
Author: Tony Zorman
|
||||||
|
https://github.com/slotThe/slotThe.github.io/blob/main/scripts/opt-fonts.py
|
||||||
|
|
||||||
|
modified by Yu Cong
|
||||||
|
'''
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from fontTools.subset import Options, Subsetter
|
||||||
|
from fontTools.ttLib import TTFont
|
||||||
|
|
||||||
|
|
||||||
|
code_font = "IosevkaCustom"
|
||||||
|
text_font = "Lato"
|
||||||
|
# title_font = "Vollkorn"
|
||||||
|
# latex_font = "KaTeX"
|
||||||
|
|
||||||
|
|
||||||
|
def used_glyphs(path: str) -> tuple[str, str]:
    """Collect the glyphs actually used by the rendered site.

    Walks `path` for .html files and returns a pair of strings:
    (glyphs appearing inside code markup, glyphs of the remaining text).
    The join separators deliberately smuggle in extra glyphs
    ("HomeDraftsAbu", "↑") that must survive font subsetting even when
    no scanned page happens to contain them.
    """
    # Parse every generated HTML page under `path`.
    pages = []
    for root, _, files in os.walk(path):
        for name in files:
            if name.endswith(".html"):
                pages.append(
                    BeautifulSoup(Path(f"{root}/{name}").read_text(), "html.parser")
                )

    # latex glyph collection was removed when switching away from KaTeX.

    # Tags rendered with the code font: <code> plus highlighted <div>s.
    code_tags = [page.find_all("code") for page in pages]
    code_tags += [
        page.find_all("div", class_=re.compile("highlight-*")) for page in pages
    ]
    code = set()  # Glyphs used in code
    for group in code_tags:
        for tag in group:
            code.update(tag.get_text())

    # Dedicated title-font handling is currently disabled.

    # For the regular text, only keep what's strictly needed: strip the
    # code tags first so their glyphs don't leak into the text set.
    normal = set()
    for group in code_tags:
        for tag in group:
            tag.extract()  # NOTE: mutates the parsed pages in place
    for page in pages:
        normal.update(page.get_text())

    # Return only the relevant glyphs for each of the fonts.
    return "HomeDraftsAbu".join(code), "↑".join(normal)
|
||||||
|
|
||||||
|
|
||||||
|
def optimise_font(in_file: str, out_file: str, text: str) -> None:
    """Subset `in_file` to the glyphs of `text` and save it as woff2.

    Prints the before/after file sizes. `in_file` and `out_file` may be
    the same path, so the original size is recorded up front.
    """
    before_size = os.path.getsize(in_file)

    options = Options(hinting=False, desubroutinize=True)
    if text_font in in_file:
        # Keep all OpenType layout features (small-caps et al.) for the
        # body-text font.
        options.layout_features = ["*"]

    font = TTFont(in_file, lazy=True)
    font.flavor = "woff2"
    subsetter = Subsetter(options)
    subsetter.populate(text=text)
    subsetter.subset(font)
    font.save(out_file)
    font.close()

    after_size = os.path.getsize(out_file)
    print(
        f"Size for {Path(in_file).stem} changed from "
        f"{before_size / 1024:.1f}KB "
        f"to {after_size / 1024:.1f}KB"
    )
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Subset every font in fonts/ against the glyphs used by _site/.

    Fonts matching `code_font` are subset to the code glyphs; all other
    fonts get the remaining body-text glyphs. Output is written next to
    the input with a .woff2 extension (non-.ttf inputs keep their name,
    so re-running subsets the already generated .woff2 files in place).
    """
    # Plain string literals: the previous f-strings had no placeholders.
    in_path = "fonts/"
    code, normal = used_glyphs("_site/")

    for font in os.listdir(in_path):
        in_file = in_path + font
        glyphs = code if code_font in in_file else normal
        optimise_font(
            in_file,
            # Reuse in_path instead of a second hard-coded "fonts/".
            in_path + font.replace(".ttf", ".woff2"),
            glyphs,
        )


if __name__ == "__main__":
    main()
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
Things that don't work:
|
||||||
|
|
||||||
|
1. equation labels
|
||||||
|
2. pandoc does not support mathtools
|
||||||
|
3. cross document refs
|
||||||
|
4.
|
||||||
+579
@@ -0,0 +1,579 @@
|
|||||||
|
@article{GARAMVOLGYI20241,
|
||||||
|
title = {Count and cofactor matroids of highly connected graphs},
|
||||||
|
journal = {Journal of Combinatorial Theory, Series B},
|
||||||
|
volume = {166},
|
||||||
|
pages = {1-29},
|
||||||
|
year = {2024},
|
||||||
|
issn = {0095-8956},
|
||||||
|
doi = {10.1016/j.jctb.2023.12.004},
|
||||||
|
url = {https://www.sciencedirect.com/science/article/pii/S0095895623001120},
|
||||||
|
author = {Dániel Garamvölgyi and Tibor Jordán and Csaba Király},
|
||||||
|
keywords = {Count matroid, Cofactor matroid, Rigid graph, Vertical connectivity, Connected matroid},
|
||||||
|
abstract = {We consider two types of matroids defined on the edge set of a graph G: count matroids Mk,ℓ(G), in which independence is defined by a sparsity count involving the parameters k and ℓ, and the C21-cofactor matroid C(G), in which independence is defined by linear independence in the cofactor matrix of G. We show, for each pair (k,ℓ), that if G is sufficiently highly connected, then G−e has maximum rank for all e∈E(G), and the matroid Mk,ℓ(G) is connected. These results unify and extend several previous results, including theorems of Nash-Williams and Tutte (k=ℓ=1), and Lovász and Yemini (k=2,ℓ=3). We also prove that if G is highly connected, then the vertical connectivity of C(G) is also high. We use these results to generalize Whitney's celebrated result on the graphic matroid of G (which corresponds to M1,1(G)) to all count matroids and to the C21-cofactor matroid: if G is highly connected, depending on k and ℓ, then the count matroid Mk,ℓ(G) uniquely determines G; and similarly, if G is 14-connected, then its C21-cofactor matroid C(G) uniquely determines G. We also derive similar results for the t-fold union of the C21-cofactor matroid, and use them to prove that every 24-connected graph has a spanning tree T for which G−E(T) is 3-connected, which verifies a case of a conjecture of Kriesell.}
|
||||||
|
}
|
||||||
|
@article{geelen_computing_2018,
|
||||||
|
title = {Computing {Girth} and {Cogirth} in {Perturbed} {Graphic} {Matroids}},
|
||||||
|
volume = {38},
|
||||||
|
issn = {0209-9683, 1439-6912},
|
||||||
|
url = {http://link.springer.com/10.1007/s00493-016-3445-3},
|
||||||
|
doi = {10.1007/s00493-016-3445-3},
|
||||||
|
language = {en},
|
||||||
|
number = {1},
|
||||||
|
urldate = {2023-03-02},
|
||||||
|
journal = {Combinatorica},
|
||||||
|
author = {Geelen, Jim and Kapadia, Rohan},
|
||||||
|
month = feb,
|
||||||
|
year = {2018},
|
||||||
|
pages = {167--191},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{Gu18,
|
||||||
|
abstract = { Rigidity, arising in discrete geometry, is the property of a structure that does not flex. Laman provides a combinatorial characterization of rigid graphs in the Euclidean plane, and thus rigid graphs in the Euclidean plane have applications in graph theory. We discover a sufficient partition condition of packing spanning rigid subgraphs and spanning trees. As a corollary, we show that a simple graph \$G\$ contains a packing of \$k\$ spanning rigid subgraphs and l spanning trees if \$G\$ is \$(4k+2l)\$-edge-connected, and \$G-Z\$ is essentially \$(6k+2l - 2k|Z|)\$-edge-connected for every \$Z\subset V(G)\$. Thus every \$(4k+2l)\$-connected and essentially \$(6k+2l)\$-connected graph \$G\$ contains a packing of \$k\$ spanning rigid subgraphs and l spanning trees. Utilizing this, we show that every 6-connected and essentially 8-connected graph \$G\$ contains a spanning tree \$T\$ such that \$G-E(T)\$ is 2-connected. These improve some previous results. Sparse subgraph covering problems are also studied. },
|
||||||
|
author = {Gu, Xiaofeng},
|
||||||
|
doi = {10.1137/17M1134196},
|
||||||
|
eprint = {https://doi.org/10.1137/17M1134196},
|
||||||
|
journal = {SIAM Journal on Discrete Mathematics},
|
||||||
|
number = {2},
|
||||||
|
pages = {1305-1313},
|
||||||
|
title = {Spanning Rigid Subgraph Packing and Sparse Subgraph Covering},
|
||||||
|
url = {https://doi.org/10.1137/17M1134196},
|
||||||
|
volume = {32},
|
||||||
|
year = {2018},
|
||||||
|
bdsk-url-1 = {https://doi.org/10.1137/17M1134196}}
|
||||||
|
|
||||||
|
@InProceedings{JordanKMM20,
|
||||||
|
author="Jord{\'a}n, Tibor
|
||||||
|
and Kobayashi, Yusuke
|
||||||
|
and Mahara, Ryoga
|
||||||
|
and Makino, Kazuhisa",
|
||||||
|
editor="G{\k{a}}sieniec, Leszek
|
||||||
|
and Klasing, Ralf
|
||||||
|
and Radzik, Tomasz",
|
||||||
|
title="The Steiner Problem for Count Matroids",
|
||||||
|
booktitle="Combinatorial Algorithms",
|
||||||
|
year="2020",
|
||||||
|
publisher="Springer International Publishing",
|
||||||
|
address="Cham",
|
||||||
|
pages="330--342",
|
||||||
|
abstract="We introduce and study a generalization of the well-known Steiner tree problem to count matroids. In the count matroid {\$}{\$}{\backslash}mathcal{\{}M{\}}{\_}{\{}k,l{\}}(G){\$}{\$}, defined on the edge set of a graph {\$}{\$}G=(V,E){\$}{\$}, a set {\$}{\$}F{\backslash}subseteq E{\$}{\$} is independent if every vertex set {\$}{\$}X{\backslash}subseteq V{\$}{\$} spans at most {\$}{\$}k|X|-l{\$}{\$} edges of F. The graph is called (k, l)-tight if its edge set is independent in {\$}{\$}{\backslash}mathcal{\{}M{\}}{\_}{\{}k,l{\}}(G){\$}{\$} and {\$}{\$}|E|=k|V|-l{\$}{\$} holds.",
|
||||||
|
isbn="978-3-030-48966-3"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@article{LeeS08,
|
||||||
|
title = {Pebble game algorithms and sparse graphs},
|
||||||
|
volume = {308},
|
||||||
|
issn = {0012365X},
|
||||||
|
doi = {10.1016/j.disc.2007.07.104},
|
||||||
|
abstract = {A multi-graph G on n vertices is (k, ℓ)-sparse if every subset of n′ ≤ n vertices spans at most kn′ - ℓ edges. G is tight if, in addition, it has exactly kn - ℓ edges. For integer values k and ℓ ∈ [0, 2 k), we characterize the (k, ℓ)-sparse graphs via a family of simple, elegant and efficient algorithms called the (k, ℓ)-pebble games. © 2007 Elsevier B.V. All rights reserved.},
|
||||||
|
number = {8},
|
||||||
|
journal = {Discrete Mathematics},
|
||||||
|
author = {Lee, Audrey and Streinu, Ileana},
|
||||||
|
year = {2008},
|
||||||
|
note = {arXiv: math/0702129},
|
||||||
|
keywords = {Circuit, Henneberg sequence, Matroid, Pebble game, Rigidity, Sparse graph},
|
||||||
|
pages = {1425--1437},
|
||||||
|
file = {PDF:/Users/chaoxu/Zotero/storage/NP5ACYFI/2008-Pebble_game_algorithms_and_sparse_graphs.pdf:application/pdf},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{StreinuT09,
|
||||||
|
title = {Sparsity-certifying {Graph} {Decompositions}},
|
||||||
|
volume = {25},
|
||||||
|
issn = {1435-5914},
|
||||||
|
url = {https://doi.org/10.1007/s00373-008-0834-4},
|
||||||
|
doi = {10.1007/s00373-008-0834-4},
|
||||||
|
abstract = {We describe a new algorithm, the (k, ℓ)-pebble game with colors, and use it to obtain a characterization of the family of (k, ℓ)-sparse graphs and algorithmic solutions to a family of problems concerning tree decompositions of graphs. Special instances of sparse graphs appear in rigidity theory and have received increased attention in recent years. In particular, our colored pebbles generalize and strengthen the previous results of Lee and Streinu [12] and give a new proof of the Tutte-Nash-Williams characterization of arboricity. We also present a new decomposition that certifies sparsity based on the (k, ℓ)-pebble game with colors. Our work also exposes connections between pebble game algorithms and previous sparse graph algorithms by Gabow [5], Gabow and Westermann [6] and Hendrickson [9].},
|
||||||
|
number = {2},
|
||||||
|
journal = {Graphs and Combinatorics},
|
||||||
|
author = {Streinu, Ileana and Theran, Louis},
|
||||||
|
month = may,
|
||||||
|
year = {2009},
|
||||||
|
pages = {219--238},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@article{Servatius91,
|
||||||
|
abstract = {We give a characterization of the dual of the 2-dimensional generic rigidity matroid R(G) of a graph G and derive necessary and sufficient conditions for a connected matroid to be the rigidity matroid of a birigid graph.},
|
||||||
|
author = {Brigitte Servatius},
|
||||||
|
doi = {10.1016/0095-8956(91)90056-P},
|
||||||
|
issn = {0095-8956},
|
||||||
|
journal = {Journal of Combinatorial Theory, Series B},
|
||||||
|
number = {1},
|
||||||
|
pages = {106-113},
|
||||||
|
title = {On the two-dimensional generic rigidity matroid and its dual},
|
||||||
|
url = {https://www.sciencedirect.com/science/article/pii/009589569190056P},
|
||||||
|
volume = {53},
|
||||||
|
year = {1991},
|
||||||
|
bdsk-url-1 = {https://www.sciencedirect.com/science/article/pii/009589569190056P},
|
||||||
|
bdsk-url-2 = {https://doi.org/10.1016/0095-8956(91)90056-P}}
|
||||||
|
|
||||||
|
@PHDTHESIS {Fahad15,
|
||||||
|
author = "Fahad, P",
|
||||||
|
title = "Dynamic Programming using Representative Families",
|
||||||
|
school = "HOMI BHABHA NATIONAL INSTITUTE",
|
||||||
|
year = "2015",
|
||||||
|
month = "jul"
|
||||||
|
}
|
||||||
|
|
||||||
|
@article {BaileyNS14,
|
||||||
|
AUTHOR = {Bailey, Robert F. and Newman, Mike and Stevens, Brett},
|
||||||
|
TITLE = {A note on packing spanning trees in graphs and bases in
|
||||||
|
matroids},
|
||||||
|
JOURNAL = {Australas. J. Combin.},
|
||||||
|
FJOURNAL = {The Australasian Journal of Combinatorics},
|
||||||
|
VOLUME = {59},
|
||||||
|
YEAR = {2014},
|
||||||
|
PAGES = {24--38},
|
||||||
|
ISSN = {1034-4942}
|
||||||
|
}
|
||||||
|
@article{ChekuriGN06,
|
||||||
|
author = {Chandra Chekuri and Sudipto Guha and Joseph (Seffi) Naor},
|
||||||
|
title = {The Steiner k-Cut Problem},
|
||||||
|
journal = {SIAM Journal on Discrete Mathematics},
|
||||||
|
volume = {20},
|
||||||
|
number = {1},
|
||||||
|
pages = {261-271},
|
||||||
|
year = {2006},
|
||||||
|
doi = {10.1137/S0895480104445095},
|
||||||
|
}
|
||||||
|
@article{Vardy97,
|
||||||
|
author = {Alexander Vardy},
|
||||||
|
title = {The Intractability of Computing the Minimum Distance of a Code},
|
||||||
|
journal = {IEEE Trans. Inf. Theor.},
|
||||||
|
volume = {43},
|
||||||
|
number = {6},
|
||||||
|
month = {Nov},
|
||||||
|
year = {1997},
|
||||||
|
issn = {0018-9448},
|
||||||
|
pages = {1757--1766},
|
||||||
|
numpages = {10},
|
||||||
|
doi = {10.1109/18.641542},
|
||||||
|
acmid = {2265234},
|
||||||
|
publisher = {IEEE Press},
|
||||||
|
address = {Piscataway, NJ, USA},
|
||||||
|
}
|
||||||
|
@InProceedings{FominGLS17,
|
||||||
|
author ={Fedor V. Fomin and Petr A. Golovach and Daniel Lokshtanov and Saket Saurabh},
|
||||||
|
title ={{Covering Vectors by Spaces: Regular Matroids}},
|
||||||
|
booktitle ={44th International Colloquium on Automata, Languages, and Programming (ICALP 2017)},
|
||||||
|
pages ={56:1--56:15},
|
||||||
|
series ={Leibniz International Proceedings in Informatics (LIPIcs)},
|
||||||
|
ISBN ={978-3-95977-041-5},
|
||||||
|
ISSN ={1868-8969},
|
||||||
|
year ={2017},
|
||||||
|
volume ={80},
|
||||||
|
editor ={Ioannis Chatzigiannakis and Piotr Indyk and Fabian Kuhn and Anca Muscholl},
|
||||||
|
publisher ={Schloss Dagstuhl--Leibniz-Zentrum fuer Informatik},
|
||||||
|
address ={Dagstuhl, Germany},
|
||||||
|
doi ={10.4230/LIPIcs.ICALP.2017.56}
|
||||||
|
}
|
||||||
|
@book{Oxley06,
|
||||||
|
title={Matroid Theory},
|
||||||
|
author={Oxley, James G.},
|
||||||
|
isbn={9780199202508},
|
||||||
|
series={Oxford graduate texts in mathematics},
|
||||||
|
year={2006},
|
||||||
|
publisher={Oxford University Press}
|
||||||
|
}
|
||||||
|
@Inbook{PanolanRS15,
|
||||||
|
author="Panolan, Fahad
|
||||||
|
and Ramanujan, M. S.
|
||||||
|
and Saurabh, Saket",
|
||||||
|
title="On the Parameterized Complexity of Girth and Connectivity Problems on Linear Matroids",
|
||||||
|
bookTitle="Algorithms and Data Structures: 14th International Symposium, WADS 2015, Victoria, BC, Canada, August 5-7, 2015. Proceedings",
|
||||||
|
year="2015",
|
||||||
|
publisher="Springer International Publishing",
|
||||||
|
pages="566--577",
|
||||||
|
isbn="978-3-319-21840-3",
|
||||||
|
doi="10.1007/978-3-319-21840-3_47",
|
||||||
|
}
|
||||||
|
@techreport{Kiraly09,
|
||||||
|
AUTHOR = {Kir{\'a}ly, Tam{\'a}s},
|
||||||
|
TITLE = {Computing the minimum cut in hypergraphic matroids},
|
||||||
|
NOTE = {{\tt www.cs.elte.hu/egres}},
|
||||||
|
INSTITUTION = {Egerv{\'a}ry Research Group, Budapest},
|
||||||
|
YEAR = {2009},
|
||||||
|
NUMBER = {QP-2009-05}
|
||||||
|
}
|
||||||
|
@article{JoretV15,
|
||||||
|
TITLE = {{Reducing the rank of a matroid}},
|
||||||
|
AUTHOR = {Joret, Gwenaël and Vetta, Adrian},
|
||||||
|
JOURNAL = {{Discrete Mathematics \& Theoretical Computer Science}},
|
||||||
|
VOLUME = {{Vol. 17 no.2}},
|
||||||
|
YEAR = {2015},
|
||||||
|
MONTH = Sep
|
||||||
|
}
|
||||||
|
@article{karger_minimum_2000,
|
||||||
|
author = {Karger, David R.},
|
||||||
|
title = {Minimum cuts in near-linear time},
|
||||||
|
journal = {Journal of the ACM},
|
||||||
|
volume = {47},
|
||||||
|
number = {1},
|
||||||
|
pages = {46--76},
|
||||||
|
year = {2000},
|
||||||
|
doi = {10.1145/331605.331608},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{Karger98,
|
||||||
|
title = {Random sampling and greedy sparsification for matroid optimization problems},
|
||||||
|
volume = {82},
|
||||||
|
issn = {0025-5610},
|
||||||
|
doi = {10.1007/BF01585865},
|
||||||
|
abstract = {Random sampling is a powerful tool for gathering information about a{\textbackslash}ngroup by considering only a small part of it. We discuss some broadly{\textbackslash}napplicable paradigms for using random sampling in combinatorial{\textbackslash}noptimization, and demonstrate the effectiveness of these paradigms for{\textbackslash}ntwo optimization problems on matroids: finding an optimum matroid basis{\textbackslash}nand packing disjoint matroid bases. Application of these ideas to the{\textbackslash}ngraphic matroid led to fast algorithms for minimum spanning trees and{\textbackslash}nminimum cuts. An optimum matroid basis is typically found by a greedy{\textbackslash}nalgorithm that grows an independent set into an optimum bash; one{\textbackslash}nelement at a time. This continuous change in the independent set can{\textbackslash}nmake it hard to perform the independence tests needed by the greedy{\textbackslash}nalgorithm. We simplify matters by using sampling to reduce the problem{\textbackslash}nof finding an optimum matroid basis to the problem of verifying that a{\textbackslash}ngiven fixed basis is optimum, showing that the two problems can be{\textbackslash}nsolved in roughly the same time. Another application of sampling is to{\textbackslash}npacking matroid bases, also known as matroid partitioning. Sampling{\textbackslash}nreduces the number of bases that must be packed. We combine sampling{\textbackslash}nwith a greedy packing strategy that reduces the size of the matroid.{\textbackslash}nTogether, these techniques give accelerated packing algorithms. We give{\textbackslash}nparticular attention to the problem of packing spanning trees in graphs,{\textbackslash}nwhich has applications in network reliability analysis. Our results can{\textbackslash}nbe seen as generalizing certain results from random graph theory.The{\textbackslash}ntechniques :have also been effective for other packing problems. 
(C){\textbackslash}n1998 The Mathematical Programming Society, Inc. Published by Elsevier{\textbackslash}nScience B.V.},
|
||||||
|
number = {1-2},
|
||||||
|
journal = {Mathematical Programming},
|
||||||
|
author = {Karger, David R},
|
||||||
|
year = {1998},
|
||||||
|
keywords = {greedy algorithm, matroid basis, random sampling},
|
||||||
|
pages = {41--81},
|
||||||
|
file = {PDF:/Users/chaoxu/Zotero/storage/FN258PYE/Karger - 1998 - Random sampling and greedy sparsification for matroid optimization problems.pdf:application/pdf},
|
||||||
|
}
|
||||||
|
@article{GurjarTV17,
|
||||||
|
author = {Rohit Gurjar and
|
||||||
|
Thomas Thierauf and
|
||||||
|
Nisheeth K. Vishnoi},
|
||||||
|
title = {Isolating a Vertex via Lattices: Polytopes with Totally Unimodular
|
||||||
|
Faces},
|
||||||
|
journal = {CoRR},
|
||||||
|
volume = {abs/1708.02222},
|
||||||
|
year = {2017},
|
||||||
|
url = {http://arxiv.org/abs/1708.02222},
|
||||||
|
timestamp = {Tue, 05 Sep 2017 10:03:46 +0200},
|
||||||
|
biburl = {http://dblp.org/rec/bib/journals/corr/abs-1708-02222},
|
||||||
|
bibsource = {dblp computer science bibliography, http://dblp.org}
|
||||||
|
}
|
||||||
|
|
||||||
|
@book{schrijver_combinatorial_2003,
|
||||||
|
address = {Berlin Heidelberg},
|
||||||
|
series = {Algorithms and combinatorics},
|
||||||
|
title = {Combinatorial optimization: polyhedra and efficiency},
|
||||||
|
isbn = {978-3-540-44389-6},
|
||||||
|
shorttitle = {Combinatorial optimization},
|
||||||
|
language = {en},
|
||||||
|
number = {24},
|
||||||
|
publisher = {Springer},
|
||||||
|
author = {Schrijver, Alexander},
|
||||||
|
year = {2003},
|
||||||
|
file = {Schrijver - 2003 - Combinatorial optimization polyhedra and efficien.pdf:/Users/congyu/Zotero/storage/8369KL3F/Schrijver - 2003 - Combinatorial optimization polyhedra and efficien.pdf:application/pdf},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{chekuri_lp_2020,
|
||||||
|
title = {{LP} {Relaxation} and {Tree} {Packing} for {Minimum} \$k\$-{Cut}},
|
||||||
|
volume = {34},
|
||||||
|
issn = {0895-4801, 1095-7146},
|
||||||
|
url = {https://epubs.siam.org/doi/10.1137/19M1299359},
|
||||||
|
doi = {10.1137/19M1299359},
|
||||||
|
abstract = {Karger used spanning tree packings [D. R. Karger, J. ACM, 47 (2000), pp. 46-76] to derive a near linear-time randomized algorithm for the global minimum cut problem as well as a bound on the number of approximate minimum cuts. This is a different approach from his well-known random contraction algorithm [D. R. Karger, Random Sampling in Graph Optimization Problems, Ph.D. thesis, Stanford University, Stanford, CA, 1995, D. R. Karger and C. Stein, J. ACM, 43 (1996), pp. 601--640]. Thorup developed a fast deterministic algorithm for the minimum k-cut problem via greedy recursive tree packings [M. Thorup, Minimum k-way cuts via deterministic greedy tree packing, in Proceedings of the Fortieth Annual ACM Symposium on Theory of Computing, ACM, 2008, pp. 159--166]. In this paper we revisit properties of an LP relaxation for k-Cut proposed by Naor and Rabani [Tree packing and approximating k-cuts, in Proceedings of the Twelfth Annual ACM-SIAM Symposium on Discrete Algorithms, Vol. 103, SIAM, Philadelphia, 2001, pp. 26--27], and analyzed in [C. Chekuri, S. Guha, and J. Naor, SIAM J. Discrete Math., 20 (2006), pp. 261--271]. We show that the dual of the LP yields a tree packing that, when combined with an upper bound on the integrality gap for the LP, easily and transparently extends Karger's analysis for mincut to the k-cut problem. In addition to the simplicity of the algorithm and its analysis, this allows us to improve the running time of Thorup's algorithm by a factor of n. We also improve the bound on the number of {\textbackslash}alpha -approximate k-cuts. Second, we give a simple proof that the integrality gap of the LP is 2(1 - 1/n). Third, we show that an optimum solution to the LP relaxation, for all values of k, is fully determined by the principal sequence of partitions of the input graph. This allows us to relate the LP relaxation to the Lagrangean relaxation approach of Barahona [Oper. Res. Lett., 26 (2000), pp. 
99--105] and Ravi and Sinha [European J. Oper. Res., 186 (2008), pp. 77--90]; it also shows that the idealized recursive tree packing considered by Thorup gives an optimum dual solution to the LP.},
|
||||||
|
language = {en},
|
||||||
|
number = {2},
|
||||||
|
urldate = {2022-04-10},
|
||||||
|
journal = {SIAM Journal on Discrete Mathematics},
|
||||||
|
author = {Chekuri, Chandra and Quanrud, Kent and Xu, Chao},
|
||||||
|
month = jan,
|
||||||
|
year = {2020},
|
||||||
|
keywords = {Approximation, K-cut, Minimum cut, Tree packing},
|
||||||
|
pages = {1334--1353},
|
||||||
|
file = {Chekuri et al. - 2020 - LP Relaxation and Tree Packing for Minimum \$k\$-Cut.pdf:/Users/congyu/Zotero/storage/XDUPHUTC/Chekuri et al. - 2020 - LP Relaxation and Tree Packing for Minimum \$k\$-Cut.pdf:application/pdf},
|
||||||
|
}
|
||||||
|
|
||||||
|
@inproceedings{boros2003algorithms,
|
||||||
|
title={Algorithms for enumerating circuits in matroids},
|
||||||
|
author={Boros, Endre and Elbassioni, Khaled and Gurvich, Vladimir and Khachiyan, Leonid},
|
||||||
|
booktitle={International Symposium on Algorithms and Computation},
|
||||||
|
pages={485--494},
|
||||||
|
year={2003},
|
||||||
|
organization={Springer}
|
||||||
|
}
|
||||||
|
|
||||||
|
@incollection{whiteley_matroids_1996,
|
||||||
|
address = {Providence, Rhode Island},
|
||||||
|
title = {Some matroids from discrete applied geometry},
|
||||||
|
volume = {197},
|
||||||
|
isbn = {978-0-8218-0508-4 978-0-8218-7788-3},
|
||||||
|
url = {http://www.ams.org/conm/197/},
|
||||||
|
abstract = {We present an array of matroids drawn from three sources in discrete applied geometry: (i) static (or first-order) rigidity of frameworks and higher skeletal rigidity; (ii) parallel drawings (or equivalently polyhedral pictures); and (iii) Crr-1-cofactors abstracted from multivariate splines in all dimensions. The strong analogies (sometimes isomorphisms) between generic rigidity matroids and generic cofactor matroids is one central theme of the chapter. We emphasize matroidal results for the combinatorial ‘generic’ situations, with geometric techniques used when they contribute combinatorial insights. A second basic theme is the analysis of represented matroids using the duality of row and column dependencies of the representing matrix (generalizing statics and kinematics in rigidity).},
|
||||||
|
language = {en},
|
||||||
|
urldate = {2024-06-11},
|
||||||
|
booktitle = {Contemporary {Mathematics}},
|
||||||
|
publisher = {American Mathematical Society},
|
||||||
|
author = {Whiteley, Walter},
|
||||||
|
editor = {Bonin, Joseph E. and Oxley, James G. and Servatius, Brigitte},
|
||||||
|
year = {1996},
|
||||||
|
doi = {10.1090/conm/197/02540},
|
||||||
|
pages = {171--311},
|
||||||
|
file = {Whiteley - 1996 - Some matroids from discrete applied geometry.pdf:/Users/congyu/Zotero/storage/VG5DFCBQ/Whiteley - 1996 - Some matroids from discrete applied geometry.pdf:application/pdf},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{pym_submodular_1970,
|
||||||
|
title = {Submodular functions and independence structures},
|
||||||
|
volume = {30},
|
||||||
|
copyright = {https://www.elsevier.com/tdm/userlicense/1.0/},
|
||||||
|
issn = {0022247X},
|
||||||
|
url = {https://linkinghub.elsevier.com/retrieve/pii/0022247X70901800},
|
||||||
|
doi = {10.1016/0022-247X(70)90180-0},
|
||||||
|
language = {en},
|
||||||
|
number = {1},
|
||||||
|
urldate = {2024-06-14},
|
||||||
|
journal = {Journal of Mathematical Analysis and Applications},
|
||||||
|
author = {Pym, J.S and Perfect, Hazel},
|
||||||
|
month = apr,
|
||||||
|
year = {1970},
|
||||||
|
pages = {1--31},
|
||||||
|
file = {Pym and Perfect - 1970 - Submodular functions and independence structures.pdf:/Users/congyu/Zotero/storage/M85CHJF6/Pym and Perfect - 1970 - Submodular functions and independence structures.pdf:application/pdf},
|
||||||
|
}
|
||||||
|
|
||||||
|
@book{frank_connections_2011,
|
||||||
|
title = {Connections in {Combinatorial} {Optimization}},
|
||||||
|
isbn = {978-0-19-920527-1},
|
||||||
|
url = {http://scholar.google.com/scholar?hl=en&btnG=Search&q=intitle:Connections+in+Combinatorial+Optimization#0},
|
||||||
|
urldate = {2014-07-17},
|
||||||
|
publisher = {Oxford University Press},
|
||||||
|
author = {Frank, András},
|
||||||
|
year = {2011},
|
||||||
|
note = {Publication Title: Oxford Lecture Series in Mathematics and Its Applications},
|
||||||
|
file = {PDF:/Users/congyu/Zotero/storage/7WP6YL2K/2011-Connections_in_Combinatorial_Optimization.pdf:application/pdf},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{catlin_fractional_1992,
|
||||||
|
title = {Fractional arboricity, strength, and principal partitions in graphs and matroids},
|
||||||
|
volume = {40},
|
||||||
|
copyright = {https://www.elsevier.com/tdm/userlicense/1.0/},
|
||||||
|
issn = {0166218X},
|
||||||
|
url = {https://linkinghub.elsevier.com/retrieve/pii/0166218X9290002R},
|
||||||
|
doi = {10.1016/0166-218X(92)90002-R},
|
||||||
|
abstract = {Catlin, P.A., J.W. Grossman, A.M. Hobbs and H.-J. Lai, Fractional arboricity, strength, and principal partitions in graphs and matroids, Discrete Applied Mathematics 40 (1992) 285-302.},
|
||||||
|
language = {en},
|
||||||
|
number = {3},
|
||||||
|
urldate = {2024-04-29},
|
||||||
|
journal = {Discrete Applied Mathematics},
|
||||||
|
author = {Catlin, Paul A. and Grossman, Jerrold W. and Hobbs, Arthur M. and Lai, Hong-Jian},
|
||||||
|
month = dec,
|
||||||
|
year = {1992},
|
||||||
|
pages = {285--302},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{haas_characterizations_2002,
|
||||||
|
title = {Characterizations of {Arboricity} of {Graphs}},
|
||||||
|
volume = {63},
|
||||||
|
abstract = {The aim of this paper is to give several characterizations for the following two classes of graphs: (i) graphs for which adding any l edges produces a graph which is decomposible into k spanning trees and (ii) graphs for which adding some l edges produces a graph which is decomposible into k spanning trees.},
|
||||||
|
journal = {Ars Comb.},
|
||||||
|
author = {Haas, Ruth},
|
||||||
|
month = apr,
|
||||||
|
year = {2002},
|
||||||
|
keywords = {base packing, sparsity},
|
||||||
|
file = {Full Text PDF:/Users/congyu/Zotero/storage/HJJNU6UK/Haas - 2002 - Characterizations of Arboricity of Graphs.pdf:application/pdf},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{jackson_generic_2010,
|
||||||
|
series = {Combinatorics and {Geometry}},
|
||||||
|
title = {The generic rank of body--bar-and-hinge frameworks},
|
||||||
|
volume = {31},
|
||||||
|
issn = {0195-6698},
|
||||||
|
url = {https://www.sciencedirect.com/science/article/pii/S0195669809000973},
|
||||||
|
doi = {10.1016/j.ejc.2009.03.030},
|
||||||
|
abstract = {Tay [T.S. Tay, Rigidity of multi-graphs I Linking Bodies in n-space, J. Combin. Theory B 26 (1984) 95--112] characterized the multigraphs which can be realized as infinitesimally rigid d-dimensional body-and-bar frameworks. Subsequently, Tay [T.S. Tay, Linking (n−2)-dimensional panels in n-space II: (n−2,2)-frameworks and body and hinge structures, Graphs Combin. 5 (1989) 245--273] and Whiteley [W. Whiteley, The union of matroids and the rigidity of frameworks, SIAM J. Discrete Math. 1 (2) (1988) 237--255] independently characterized the multigraphs which can be realized as infinitesimally rigid d-dimensional body-and-hinge frameworks. We adapt Whiteley’s proof technique to characterize the multigraphs which can be realized as infinitesimally rigid d-dimensional body--bar-and-hinge frameworks. More importantly, we obtain a sufficient condition for a multigraph to be realized as an infinitesimally rigid d-dimensional body-and-hinge framework in which all hinges lie in the same hyperplane. This result is related to a long-standing conjecture of Tay and Whiteley [T.S. Tay, W. Whiteley, Recent advances in the generic rigidity of structures, Structural Topology 9 (1984) 31--38] which would characterize when a multigraph can be realized as an infinitesimally rigid d-dimensional body-and-hinge framework in which all the hinges incident to each body lie in a common hyperplane. As a corollary we deduce that if a graph G has three spanning trees which use each edge of G at most twice, then its square can be realized as an infinitesimally rigid three-dimensional bar-and-joint framework.},
|
||||||
|
number = {2},
|
||||||
|
urldate = {2026-03-12},
|
||||||
|
journal = {European Journal of Combinatorics},
|
||||||
|
author = {Jackson, Bill and Jord{\'a}n, Tibor},
|
||||||
|
month = feb,
|
||||||
|
year = {2010},
|
||||||
|
pages = {574--588},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{Nash-Williams_1961,
|
||||||
|
title = {Edge-Disjoint Spanning Trees of Finite Graphs},
|
||||||
|
volume = {s1-36},
|
||||||
|
rights = {http://doi.wiley.com/10.1002/tdm_license_1.1},
|
||||||
|
issn = {00246107},
|
||||||
|
url = {http://doi.wiley.com/10.1112/jlms/s1-36.1.445},
|
||||||
|
doi = {10.1112/jlms/s1-36.1.445},
|
||||||
|
number = {1},
|
||||||
|
journal = {Journal of the London Mathematical Society},
|
||||||
|
author = {Nash-Williams, C. St.J. A.},
|
||||||
|
year = {1961},
|
||||||
|
pages = {445--450},
|
||||||
|
language = {en},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{fujishige_lexicographically_1980,
|
||||||
|
title = {Lexicographically {Optimal} {Base} of a {Polymatroid} with {Respect} to a {Weight} {Vector}},
|
||||||
|
volume = {5},
|
||||||
|
issn = {0364-765X},
|
||||||
|
url = {https://doi.org/10.1287/moor.5.2.186},
|
||||||
|
doi = {10.1287/moor.5.2.186},
|
||||||
|
number = {2},
|
||||||
|
journal = {Mathematics of Operations Research},
|
||||||
|
author = {Fujishige, Satoru},
|
||||||
|
year = {1980},
|
||||||
|
pages = {186--196},
|
||||||
|
}
|
||||||
|
|
||||||
|
@incollection{fujishige_principal_2009,
|
||||||
|
title = {Theory of {Principal} {Partitions} {Revisited}},
|
||||||
|
booktitle = {Research {Trends} in {Combinatorial} {Optimization}},
|
||||||
|
publisher = {Springer Berlin Heidelberg},
|
||||||
|
address = {Berlin, Heidelberg},
|
||||||
|
author = {Fujishige, Satoru},
|
||||||
|
editor = {Cook, William J. and Lov{\'a}sz, L{\'a}szl{\'o} and Vygen, Jens},
|
||||||
|
year = {2009},
|
||||||
|
pages = {127--162},
|
||||||
|
doi = {10.1007/978-3-540-76796-1_7},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{truong_modulus_2025,
|
||||||
|
title = {Modulus for bases of matroids},
|
||||||
|
volume = {348},
|
||||||
|
url = {https://doi.org/10.1016/j.disc.2025.114395},
|
||||||
|
doi = {10.1016/j.disc.2025.114395},
|
||||||
|
number = {5},
|
||||||
|
journal = {Discrete Mathematics},
|
||||||
|
author = {Truong, Huy and Poggi-Corradini, Pietro},
|
||||||
|
year = {2025},
|
||||||
|
pages = {114395},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{zaslavsky_biased_1989,
|
||||||
|
author = {Zaslavsky, Thomas},
|
||||||
|
title = {Biased graphs. {I}. {B}ias, balance, and gains},
|
||||||
|
journal = {Journal of Combinatorial Theory, Series B},
|
||||||
|
volume = {47},
|
||||||
|
number = {1},
|
||||||
|
pages = {32--52},
|
||||||
|
year = {1989},
|
||||||
|
doi = {10.1016/0095-8956(89)90063-4},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{zaslavsky_biased_1991,
|
||||||
|
author = {Zaslavsky, Thomas},
|
||||||
|
title = {Biased graphs. {II}. {T}he three matroids},
|
||||||
|
journal = {Journal of Combinatorial Theory, Series B},
|
||||||
|
volume = {51},
|
||||||
|
number = {1},
|
||||||
|
pages = {46--72},
|
||||||
|
year = {1991},
|
||||||
|
doi = {10.1016/0095-8956(91)90005-5},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{dejesus_kcircular_2017,
|
||||||
|
author = {De Jes\'{u}s, Jos\'{e} F. and Kelmans, Alexander K.},
|
||||||
|
title = {\(k\)-circular matroids of graphs},
|
||||||
|
journal = {Discrete Applied Mathematics},
|
||||||
|
volume = {225},
|
||||||
|
pages = {33--50},
|
||||||
|
year = {2017},
|
||||||
|
doi = {10.1016/j.dam.2017.02.026},
|
||||||
|
}
|
||||||
|
|
||||||
|
@incollection{dinits_structure_1976,
|
||||||
|
author = {Dinits, E. A. and Karzanov, A. V. and Lomonosov, M. V.},
|
||||||
|
title = {On the structure of a family of minimal weighted cuts in a graph},
|
||||||
|
booktitle = {Studies in Discrete Optimization},
|
||||||
|
editor = {Fridman, A. A.},
|
||||||
|
publisher = {Nauka},
|
||||||
|
address = {Moscow},
|
||||||
|
year = {1976},
|
||||||
|
pages = {290--306},
|
||||||
|
note = {In Russian},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{goldschmidt_polynomial_1994,
|
||||||
|
author = {Goldschmidt, Olivier and Hochbaum, Dorit S.},
|
||||||
|
title = {A Polynomial Algorithm for the $k$-Cut Problem for Fixed $k$},
|
||||||
|
journal = {Mathematics of Operations Research},
|
||||||
|
volume = {19},
|
||||||
|
number = {1},
|
||||||
|
pages = {24--37},
|
||||||
|
year = {1994},
|
||||||
|
doi = {10.1287/moor.19.1.24},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{berczi_blocking_2018,
|
||||||
|
author = {B\'{e}rczi, Krist\'{o}f and Bern\'{a}th, Attila and Kir\'{a}ly, Tam\'{a}s and Pap, Gyula},
|
||||||
|
title = {Blocking optimal structures},
|
||||||
|
journal = {Discrete Mathematics},
|
||||||
|
volume = {341},
|
||||||
|
number = {7},
|
||||||
|
pages = {1864--1872},
|
||||||
|
year = {2018},
|
||||||
|
doi = {10.1016/j.disc.2018.03.024},
|
||||||
|
}
|
||||||
|
|
||||||
|
@inproceedings{eren_rigidity_2004,
|
||||||
|
author = {Eren, Tolga and Goldenberg, D. K. and Whiteley, Walter and Yang, Yang Richard and Belhumeur, Peter N. and Anderson, Brian D. O. and Morse, A. Stephen},
|
||||||
|
title = {Rigidity, computation, and randomization in network localization},
|
||||||
|
booktitle = {Proceedings of IEEE INFOCOM},
|
||||||
|
year = {2004},
|
||||||
|
doi = {10.1109/INFCOM.2004.1354686},
|
||||||
|
}
|
||||||
|
|
||||||
|
@inproceedings{yu_agent_2008,
|
||||||
|
author = {Yu, Changbin and Anderson, Brian D. O.},
|
||||||
|
title = {Agent and link redundancy for autonomous formations},
|
||||||
|
booktitle = {Proceedings of the 17th World Congress, International Federation of Automatic Control},
|
||||||
|
year = {2008},
|
||||||
|
doi = {10.3182/20080706-5-KR-1001.0554},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{jacobs_protein_2001,
|
||||||
|
author = {Jacobs, Donald J. and Rader, A. J. and Kuhn, Leslie A. and Thorpe, M. F.},
|
||||||
|
title = {Protein flexibility predictions using graph theory},
|
||||||
|
journal = {Proteins: Structure, Function, and Bioinformatics},
|
||||||
|
volume = {44},
|
||||||
|
number = {2},
|
||||||
|
pages = {150--165},
|
||||||
|
year = {2001},
|
||||||
|
doi = {10.1002/prot.1081},
|
||||||
|
}
|
||||||
|
|
||||||
|
@inproceedings{thorup_minimum_2008,
|
||||||
|
author = {Thorup, Mikkel},
|
||||||
|
title = {Minimum $k$-way cuts via deterministic greedy tree packing},
|
||||||
|
booktitle = {Proceedings of the 40th Annual ACM Symposium on Theory of Computing},
|
||||||
|
pages = {159--166},
|
||||||
|
year = {2008},
|
||||||
|
doi = {10.1145/1374376.1374402},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{gupta_optimal_2022,
|
||||||
|
author = {Gupta, Anupam and Harris, David G. and Lee, Euiwoong and Li, Jason},
|
||||||
|
title = {Optimal Bounds for the $k$-cut Problem},
|
||||||
|
journal = {Journal of the ACM},
|
||||||
|
volume = {69},
|
||||||
|
number = {1},
|
||||||
|
pages = {1--18},
|
||||||
|
year = {2022},
|
||||||
|
doi = {10.1145/3478018},
|
||||||
|
}
|
||||||
|
|
||||||
|
@techreport{jordan_combinatorial_2014,
|
||||||
|
address = {Budapest, Hungary},
|
||||||
|
title = {Combinatorial rigidity: graphs and matroids in the theory of rigid frameworks},
|
||||||
|
issn = {1587-4451},
|
||||||
|
url = {http://www.cs.elte.hu/egres},
|
||||||
|
language = {en},
|
||||||
|
number = {TR-2014-12},
|
||||||
|
institution = {Egerváry Research Group},
|
||||||
|
author = {Jordán, Tibor},
|
||||||
|
month = sep,
|
||||||
|
year = {2014},
|
||||||
|
file = {Jordan - COMBINATORIAL RIGIDITY GRAPHS AND MATROIDS IN THE.pdf:/Users/congyu/Zotero/storage/RRMXGIDT/Jordan - COMBINATORIAL RIGIDITY GRAPHS AND MATROIDS IN THE.pdf:application/pdf},
|
||||||
|
}
|
||||||
|
@article{graver_rigidity_1991,
|
||||||
|
title = {Rigidity {Matroids}},
|
||||||
|
volume = {4},
|
||||||
|
issn = {0895-4801, 1095-7146},
|
||||||
|
url = {http://epubs.siam.org/doi/10.1137/0404032},
|
||||||
|
doi = {10.1137/0404032},
|
||||||
|
abstract = {This paper begins with a short discussion of the general principles of Rigidity Theory. The main interest is the combinatorial part ofthis subject: generic rigidity. While generic rigidity has several combinatorial characterizations in dimensions one and two, these characterizations have not been able to be extended to characterizations of generic rigidity in higher dimensions. In fact, no "purely combinatorial" characterization is presently known for generic rigidity in dimensions three and up. The concept of an abstract rigidity matroid is introduced and, in the context of matroid theory, the present status of the characterization problem is discussed.},
|
||||||
|
language = {en},
|
||||||
|
number = {3},
|
||||||
|
urldate = {2025-10-10},
|
||||||
|
journal = {SIAM Journal on Discrete Mathematics},
|
||||||
|
author = {Graver, Jack E.},
|
||||||
|
month = aug,
|
||||||
|
year = {1991},
|
||||||
|
pages = {355--368},
|
||||||
|
file = {PDF:/Users/congyu/Zotero/storage/GPKMUI3Y/Graver - 1991 - Rigidity Matroids.pdf:application/pdf},
|
||||||
|
}
|
||||||
|
|
||||||
+607
@@ -0,0 +1,607 @@
|
|||||||
|
{-# LANGUAGE BlockArguments #-}
|
||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
{-# LANGUAGE StandaloneKindSignatures #-}
|
||||||
|
|
||||||
|
module ChaoDoc (chaoDocRead, chaoDocWrite, chaoDocPandocCompiler, chaoDocCompiler) where
|
||||||
|
|
||||||
|
import Control.Monad.State
|
||||||
|
import Data.Char (isAlphaNum, isSpace)
|
||||||
|
import Data.Kind (Type)
|
||||||
|
import Data.Either
|
||||||
|
import Data.Functor
|
||||||
|
import Data.List (intersect, stripPrefix)
|
||||||
|
import qualified Data.Map as M
|
||||||
|
import Data.Maybe
|
||||||
|
import Data.Text (Text, pack)
|
||||||
|
import qualified Data.Text as T
|
||||||
|
import Hakyll
|
||||||
|
import Pangu (isCJK, pangu)
|
||||||
|
import SideNoteHTML (usingSideNotesHTML)
|
||||||
|
import System.IO.Unsafe
|
||||||
|
import Text.Pandoc
|
||||||
|
-- import Text.Pandoc.Builder
|
||||||
|
import Text.Pandoc.Citeproc
|
||||||
|
import Text.Pandoc.Walk (query, walk, walkM)
|
||||||
|
|
||||||
|
-- setMeta key val (Pandoc (Meta ms) bs) = Pandoc (Meta $ M.insert key val ms) bs
|
||||||
|
|
||||||
|
-- On mac, please do `export LANG=C` before using this thing
|
||||||
|
-- | Reader options for site documents: pandoc's Markdown plus a few
-- Obsidian-friendly extensions (wikilinks with title after the pipe,
-- TeX math via single/double backslash delimiters, raw TeX, LaTeX
-- macros, lists without a preceding blank line), with headers allowed
-- without a preceding blank line.
chaoDocRead :: ReaderOptions
chaoDocRead = def {readerExtensions = exts}
  where
    exts =
      foldr
        enableExtension
        (disableExtension Ext_blank_before_header pandocExtensions)
        [ Ext_lists_without_preceding_blankline,
          Ext_wikilinks_title_after_pipe,
          Ext_tex_math_double_backslash,
          Ext_tex_math_single_backslash,
          Ext_latex_macros,
          Ext_raw_tex
        ]

-- | Writer options for site documents: MathML for math, numbered
-- sections, and a table of contents two levels deep.
chaoDocWrite :: WriterOptions
chaoDocWrite =
  def
    { writerHTMLMathMethod = MathML,
      writerNumberSections = True,
      writerTableOfContents = True,
      writerTOCDepth = 2
    }
|
||||||
|
|
||||||
|
-- getInline :: Inline -> [Inline]
|
||||||
|
-- getInline x = [x]
|
||||||
|
|
||||||
|
-- | Extract the inlines of the last 'Plain' or 'Para' block of a
-- document; the empty list when no such block exists.
pandocToInline :: Pandoc -> [Inline]
pandocToInline (Pandoc _ blocks) =
  case mapMaybe inlinesOf (reverse blocks) of
    (is : _) -> is
    [] -> []
  where
    inlinesOf (Plain is) = Just is
    inlinesOf (Para is) = Just is
    inlinesOf _ = Nothing
|
||||||
|
|
||||||
|
-- | Callout types that receive an incrementing theorem number
-- (English and Chinese variants).
incrementalBlock :: [Text]
incrementalBlock =
  [ "Theorem", "Conjecture", "Definition", "Example", "Lemma",
    "Problem", "Proposition", "Corollary", "Observation", "Claim",
    "定理", "猜想", "定义", "例", "引理", "问题", "命题", "推论", "观察"
  ]

-- | Callout types rendered as theorem-like blocks but never numbered.
otherBlock :: [Text]
otherBlock = ["Proof", "Remark", "证明", "备注"]

-- | All callout classes treated as theorem environments.
theoremClasses :: [Text]
theoremClasses = incrementalBlock ++ otherBlock
|
||||||
|
|
||||||
|
-- | Match a raw callout marker (e.g. @\"theorem\"@) against the known
-- theorem classes, ignoring case and surrounding whitespace; returns
-- the canonical class name.
canonicalTheoremType :: Text -> Maybe Text
canonicalTheoremType raw = listToMaybe (filter matches theoremClasses)
  where
    needle = T.toCaseFold (T.strip raw)
    matches cls = T.toCaseFold cls == needle

-- Small helpers over pandoc 'Attr' triples.

-- | The class list of an 'Attr'.
getClass :: Attr -> [Text]
getClass (_, classes, _) = classes

-- | Prepend a class to an 'Attr'.
addClass :: Attr -> Text -> Attr
addClass (ident, classes, kvs) cls = (ident, cls : classes, kvs)

-- | Prepend a key/value pair to an 'Attr'.
addAttr :: Attr -> Text -> Text -> Attr
addAttr (ident, classes, kvs) k v = (ident, classes, (k, v) : kvs)
|
||||||
|
|
||||||
|
-- | Stamp theorem divs with their @type@ attribute and, for numbered
-- kinds, the next @index@ from the running counter.
preprocessTheorems :: Block -> State Int Block
preprocessTheorems (Div attr xs)
  | isNumbered = do
      n <- get
      put (n + 1)
      pure (Div (addAttr typedAttr "index" (pack (show n))) xs)
  | isUnnumbered = pure (Div typedAttr xs)
  | otherwise = pure (Div attr xs)
  where
    classes = getClass attr
    isNumbered = not (null (classes `intersect` incrementalBlock))
    isUnnumbered = not (null (classes `intersect` otherBlock))
    -- Safe use of 'head': only forced under the guards above, where
    -- the intersection is known to be non-empty.
    theoremType = head (classes `intersect` theoremClasses)
    typedAttr = addAttr attr "type" theoremType
preprocessTheorems x = pure x
|
||||||
|
|
||||||
|
-- | Full theorem pipeline: normalize Obsidian callouts, number the
-- theorem divs (counter starts at 1), resolve label references, then
-- render the theorem headers.
theoremFilter :: Pandoc -> Pandoc
theoremFilter doc = walk makeTheorem (autorefFilter numbered)
  where
    numbered =
      evalState (walkM preprocessTheorems (obsidianTheoremFilter doc)) 1

-- | Collect @(label, (theorem type, optional index))@ from every
-- labelled theorem div, for use by the autoref pass. Divs without a
-- @type@ attribute or without an id contribute nothing.
theoremIndex :: Block -> [(Text, (Text, Maybe Text))]
theoremIndex (Div (ident, _, kvs) _) =
  case lookup "type" kvs of
    Just ty
      | not (T.null ident) -> [(ident, (ty, lookup "index" kvs))]
    _ -> []
theoremIndex _ = []
|
||||||
|
|
||||||
|
-- | Build a link to a labelled theorem. The link text is the theorem
-- type plus its number when one was assigned (e.g. \"Theorem 3\"),
-- just the type otherwise.
theoremLink :: [(Text, (Text, Maybe Text))] -> Text -> Maybe Inline
theoremLink refs blockId = do
  (theoremType, index) <- lookup blockId refs
  let linkTitle = case index of
        Just num | not (T.null num) -> theoremType <> " " <> num
        _ -> theoremType
  return (Link nullAttr [Str linkTitle] ("#" <> blockId, linkTitle))

-- | Rewrite citations and @#^label@ links that refer to theorem
-- labels into proper theorem links; leave everything else untouched.
--
-- Fix: the original called 'head' on the citation list, so a 'Cite'
-- carrying an empty citation list crashed the filter; such a node is
-- now returned unchanged.
autoref :: [(Text, (Text, Maybe Text))] -> Inline -> Inline
autoref refs orig@(Cite citations _)
  | (c : _) <- citations,
    Just link <- theoremLink refs (citationId c) =
      link
  | otherwise = orig
autoref refs orig@(Link _ _ (target, _))
  | Just blockId <- T.stripPrefix "#^" target,
    Just link <- theoremLink refs blockId =
      link
  | otherwise = orig
autoref _ y = y
|
||||||
|
|
||||||
|
-- | Apply 'autoref' everywhere, using the label table gathered from
-- the whole document.
autorefFilter :: Pandoc -> Pandoc
autorefFilter doc = walk (autoref (query theoremIndex doc)) doc
|
||||||
|
|
||||||
|
-- NOTE: processCitations operates on the AST. To use citations inside
-- a theorem name, the name must first be parsed to AST as well, so
-- the theorem filter has to run before citeproc. (autoref still does
-- not work inside theorem names.)

-- | Contents of @math-macros.md@, loaded once at startup.
-- 'unsafePerformIO' with NOINLINE keeps it a single shared read.
mathMacros :: Text
mathMacros = unsafePerformIO (fmap pack (readFile "math-macros.md"))
{-# NOINLINE mathMacros #-}

-- | Prepend a macro preamble to a document body, separated by a
-- blank line.
prependMacros :: Text -> Text -> Text
prependMacros macros body = macros <> "\n\n" <> body

-- | Prepend the site-wide math macros.
prependMathMacros :: Text -> Text
prependMathMacros = prependMacros mathMacros

-- | Parse a theorem title as Markdown with the math macros in scope;
-- a parse failure yields an empty document.
thmNamePandoc :: Text -> Pandoc
thmNamePandoc x =
  either (const (Pandoc nullMeta [])) id . runPure $
    readMarkdown chaoDocRead (prependMathMacros x)
|
||||||
|
|
||||||
|
-- | Turn Obsidian callout blockquotes into theorem divs, then attach
-- any standalone @^label@ paragraphs that follow them.
obsidianTheoremFilter :: Pandoc -> Pandoc
obsidianTheoremFilter =
  attachStandaloneLabels . walk rewriteObsidianBlockQuote

-- | Rewrite a blockquote of the form @> [!Theorem] Title@ into a div
-- carrying the theorem class, an optional @title@ attribute, and an
-- id taken from a trailing @^label@ when one is present.
rewriteObsidianBlockQuote :: Block -> Block
rewriteObsidianBlockQuote block@(BlockQuote quoteBlocks) =
  fromMaybe block do
    (theoremType, title, rawBody) <- parseObsidianTheorem quoteBlocks
    let (bodyBlocks, label) = stripTrailingLabel rawBody
        attrs :: [(Text, Text)]
        attrs = case title of
          Nothing -> []
          Just t -> [("title", t)]
    pure (Div (fromMaybe "" label, [theoremType], attrs) bodyBlocks)
rewriteObsidianBlockQuote block = block

-- | Run 'attachLabels' over a document's top-level blocks.
attachStandaloneLabels :: Pandoc -> Pandoc
attachStandaloneLabels (Pandoc meta blocks) =
  Pandoc meta (attachLabels blocks)
|
||||||
|
|
||||||
|
-- | When an unlabelled theorem div is immediately followed by a block
-- consisting solely of a @^label@ line, move that label onto the div
-- and drop the label block.
attachLabels :: [Block] -> [Block]
attachLabels (Div attr xs : next : rest)
  | isTheoremAttr attr,
    T.null ident =
      case blockLabel next of
        Just label -> Div (setAttrId attr label) xs : attachLabels rest
        Nothing -> Div attr xs : attachLabels (next : rest)
  where
    (ident, _, _) = attr
attachLabels (x : xs) = x : attachLabels xs
attachLabels [] = []

-- | Does this 'Attr' carry one of the theorem classes?
isTheoremAttr :: Attr -> Bool
isTheoremAttr attr =
  not (null (getClass attr `intersect` theoremClasses))

-- | Replace the identifier of an 'Attr'.
setAttrId :: Attr -> Text -> Attr
setAttrId (_, classes, kvs) ident = (ident, classes, kvs)
|
||||||
|
|
||||||
|
-- | Split a callout's blocks into (type, optional title, body). The
-- first block must be the callout header line.
parseObsidianTheorem :: [Block] -> Maybe (Text, Maybe Text, [Block])
parseObsidianTheorem [] = Nothing
parseObsidianTheorem (headerBlock : rest) = do
  (theoremType, title, bodyStart) <- parseCalloutHeader headerBlock
  pure (theoremType, title, maybeToList bodyStart ++ rest)

-- | A callout header may arrive as either a 'Para' or a 'Plain'.
parseCalloutHeader :: Block -> Maybe (Text, Maybe Text, Maybe Block)
parseCalloutHeader (Para is) = parseCalloutHeaderWith Para is
parseCalloutHeader (Plain is) = parseCalloutHeaderWith Plain is
parseCalloutHeader _ = Nothing

-- | Parse @[!Type] Title\\nbody...@: the rest of the first line is the
-- (Markdown) title; the remainder becomes the leading body block,
-- rebuilt with @mkBlock@.
parseCalloutHeaderWith ::
  ([Inline] -> Block) -> [Inline] -> Maybe (Text, Maybe Text, Maybe Block)
parseCalloutHeaderWith mkBlock inlines = do
  (theoremType, rest) <- parseCalloutPrefix inlines
  let (titleInlines, bodyInlines) = splitAtFirstBreak rest
      titleMarkdown = inlineMarkdown (trimInlineSpaces titleInlines)
      bodyBlock = nonEmptyBlock mkBlock (trimInlineSpaces bodyInlines)
      title
        | T.null titleMarkdown = Nothing
        | otherwise = Just titleMarkdown
  pure (theoremType, title, bodyBlock)

-- | Recognize the @[!Type]@ marker at the front of a callout and
-- return the canonical theorem type plus the remaining inlines.
parseCalloutPrefix :: [Inline] -> Maybe (Text, [Inline])
parseCalloutPrefix (Str marker : rest) = do
  inner <- T.stripPrefix "[!" marker >>= T.stripSuffix "]"
  theoremType <- canonicalTheoremType inner
  pure (theoremType, dropLeadingInlineSpaces rest)
parseCalloutPrefix _ = Nothing
|
||||||
|
|
||||||
|
-- | Remove a trailing @^label@ from the last block of a list,
-- returning the cleaned blocks and the label when one was found;
-- otherwise the blocks are returned untouched.
stripTrailingLabel :: [Block] -> ([Block], Maybe Text)
stripTrailingLabel blocks =
  fromMaybe (blocks, Nothing) do
    (prefix, lastBlock) <- unsnoc blocks
    (cleaned, label) <- splitBlockLabel lastBlock
    pure (prefix ++ maybeToList cleaned, Just label)

-- | A block that consists solely of a @^label@ line.
blockLabel :: Block -> Maybe Text
blockLabel block = case splitBlockLabel block of
  Just (Nothing, label) -> Just label
  _ -> Nothing
|
||||||
|
|
||||||
|
-- | Split a trailing @^label@ off a block. Returns the block with the
-- label removed ('Nothing' when nothing remains) and the label text.
splitBlockLabel :: Block -> Maybe (Maybe Block, Text)
splitBlockLabel (Para is) = splitBlockLabelWith Para is
splitBlockLabel (Plain is) = splitBlockLabelWith Plain is
splitBlockLabel (BlockQuote bs) = splitBlockLabelWithBlocks BlockQuote bs
splitBlockLabel (Div attr bs) = splitBlockLabelWithBlocks (Div attr) bs
splitBlockLabel (BulletList items) = do
  (items', label) <- splitBlockLabelItems items
  pure (nonEmptyListBlock BulletList items', label)
splitBlockLabel (OrderedList attrs items) = do
  (items', label) <- splitBlockLabelItems items
  pure (nonEmptyListBlock (OrderedList attrs) items', label)
splitBlockLabel _ = Nothing

-- | Label-splitting for inline-carrying blocks ('Para'/'Plain').
splitBlockLabelWith ::
  ([Inline] -> Block) -> [Inline] -> Maybe (Maybe Block, Text)
splitBlockLabelWith mkBlock inlines = do
  (remaining, label) <- splitTrailingLabelLine inlines
  pure (nonEmptyBlock mkBlock remaining, label)

-- | Label-splitting for block containers ('BlockQuote'/'Div'):
-- the label may end the last nested block.
splitBlockLabelWithBlocks ::
  ([Block] -> Block) -> [Block] -> Maybe (Maybe Block, Text)
splitBlockLabelWithBlocks mkBlock blocks = do
  let (remaining, label) = stripTrailingLabel blocks
  found <- label
  pure (nonEmptyBlocks mkBlock remaining, found)

-- | Label-splitting for list blocks: the label may end the last item.
splitBlockLabelItems :: [[Block]] -> Maybe ([[Block]], Text)
splitBlockLabelItems items = do
  (prefixItems, lastItem) <- unsnoc items
  let (remaining, label) = stripTrailingLabel lastItem
  found <- label
  pure (prefixItems ++ maybeToList (nonEmptyListItem remaining), found)
|
||||||
|
|
||||||
|
-- | Split the final @^label@ line off a multi-line inline run,
-- returning the preceding lines rejoined and the label.
splitTrailingLabelLine :: [Inline] -> Maybe ([Inline], Text)
splitTrailingLabelLine inlines = do
  (prefixLines, finalLine) <- unsnoc (inlineLines inlines)
  label <- labelFromInlines finalLine
  pure (joinInlineLines prefixLines, label)

-- | A line that is exactly one 'Str' holding a label.
labelFromInlines :: [Inline] -> Maybe Text
labelFromInlines is = case is of
  [Str s] -> labelFromText s
  _ -> Nothing

-- | @\"^foo\"@ (after stripping surrounding space) yields @\"foo\"@;
-- empty labels are rejected.
labelFromText :: Text -> Maybe Text
labelFromText s = do
  label <- T.stripPrefix "^" (T.strip s)
  if T.null label then Nothing else Just label
|
||||||
|
|
||||||
|
-- | Split inlines into lines at soft/hard breaks (breaks removed).
inlineLines :: [Inline] -> [[Inline]]
inlineLines = foldr step [[]]
  where
    step brk acc
      | brk == SoftBreak || brk == LineBreak = [] : acc
    step x (line : rest) = (x : line) : rest
    step _ [] = [[]] -- unreachable: the accumulator is never empty

-- | Inverse of 'inlineLines', reinserting soft breaks between lines.
joinInlineLines :: [[Inline]] -> [Inline]
joinInlineLines lns = case lns of
  [] -> []
  (firstLine : rest) -> firstLine ++ concatMap (SoftBreak :) rest
|
||||||
|
|
||||||
|
-- | Writer options for round-tripping small inline fragments back to
-- Markdown, mirroring the reader's extension set.
inlineMarkdownWrite :: WriterOptions
inlineMarkdownWrite =
  def {writerExtensions = readerExtensions chaoDocRead}

-- | Render inlines back to (stripped) Markdown text; any writer
-- failure collapses to the empty string.
inlineMarkdown :: [Inline] -> Text
inlineMarkdown inlines =
  T.strip . fromRight "" . runPure $
    writeMarkdown inlineMarkdownWrite (Pandoc nullMeta [Plain inlines])
|
||||||
|
|
||||||
|
-- | Split at the first soft/hard break; the break itself is dropped.
-- When there is no break, everything ends up in the first component.
splitAtFirstBreak :: [Inline] -> ([Inline], [Inline])
splitAtFirstBreak inlines =
  case break isBreak inlines of
    (before, _ : after) -> (before, after)
    (before, []) -> (before, [])
  where
    isBreak SoftBreak = True
    isBreak LineBreak = True
    isBreak _ = False
|
||||||
|
|
||||||
|
-- | Drop 'Space' inlines from both ends of a run.
trimInlineSpaces :: [Inline] -> [Inline]
trimInlineSpaces = dropTrailingInlineSpaces . dropLeadingInlineSpaces

-- | Drop leading 'Space' inlines.
dropLeadingInlineSpaces :: [Inline] -> [Inline]
dropLeadingInlineSpaces = dropWhile isInlineSpace

-- | Drop trailing 'Space' inlines.
dropTrailingInlineSpaces :: [Inline] -> [Inline]
dropTrailingInlineSpaces = reverse . dropWhile isInlineSpace . reverse

-- | Is this inline a plain 'Space'? (Breaks do not count here.)
isInlineSpace :: Inline -> Bool
isInlineSpace inl = case inl of
  Space -> True
  _ -> False
|
||||||
|
|
||||||
|
-- | Rebuild an inline block, or 'Nothing' when only whitespace is left.
nonEmptyBlock :: ([Inline] -> Block) -> [Inline] -> Maybe Block
nonEmptyBlock mkBlock inlines
  | all isWhitespaceInline inlines = Nothing
  | otherwise = Just (mkBlock inlines)

-- | Rebuild a block container, or 'Nothing' when it would be empty.
nonEmptyBlocks :: ([Block] -> Block) -> [Block] -> Maybe Block
nonEmptyBlocks mkBlock blocks
  | null blocks = Nothing
  | otherwise = Just (mkBlock blocks)

-- | Rebuild a list block, or 'Nothing' when there are no items.
nonEmptyListBlock :: ([[Block]] -> Block) -> [[Block]] -> Maybe Block
nonEmptyListBlock mkBlock items
  | null items = Nothing
  | otherwise = Just (mkBlock items)

-- | Keep a list item only when it still has content.
nonEmptyListItem :: [Block] -> Maybe [Block]
nonEmptyListItem [] = Nothing
nonEmptyListItem blocks = Just blocks

-- | Whitespace inlines: spaces and both kinds of break.
isWhitespaceInline :: Inline -> Bool
isWhitespaceInline Space = True
isWhitespaceInline SoftBreak = True
isWhitespaceInline LineBreak = True
isWhitespaceInline _ = False
|
||||||
|
|
||||||
|
-- | Split a list into its initial segment and last element;
-- 'Nothing' for the empty list.
unsnoc :: [a] -> Maybe ([a], a)
unsnoc = foldr step Nothing
  where
    step x Nothing = Just ([], x)
    step x (Just (prefix, final)) = Just (x : prefix, final)
|
||||||
|
|
||||||
|
-- | Render a processed theorem div: add the @theorem-environment@
-- class and prepend a header line built from the div's @type@,
-- @index@ and @title@ attributes.
makeTheorem :: Block -> Block
makeTheorem (Div attr xs) =
  case lookup "type" kvs of
    Nothing -> Div attr xs
    Just theoremType ->
      Div
        (addClass attr "theorem-environment")
        (Plain [header theoremType] : xs)
  where
    (_, _, kvs) = attr
    header theoremType =
      Span
        (addClass nullAttr "theorem-header")
        [ Span (addClass nullAttr "type") [Str theoremType],
          maybe (Str "") indexSpan (lookup "index" kvs),
          maybe (Str "") nameSpan (lookup "title" kvs)
        ]
    indexSpan idx = Span (addClass nullAttr "index") [Str idx]
    -- The title is itself Markdown (it may contain math), so parse it
    -- back to inlines before rendering.
    nameSpan name =
      Span
        (addClass nullAttr "name")
        (pandocToInline (thmNamePandoc name))
makeTheorem x = x
|
||||||
|
|
||||||
|
-- Bibliography assets, from
-- https://github.com/chaoxu/chaoxu.github.io/tree/develop

-- | CSL style file driving citation formatting.
cslFile :: String
cslFile = "bib_style.csl"

-- | BibTeX database consumed by citeproc.
bibFile :: String
bibFile = "reference.bib"
|
||||||
|
|
||||||
|
-- | Read the current resource as Markdown (with the math macros
-- prepended) and run the bibliography-aware pipeline, yielding the
-- processed 'Pandoc'.
chaoDocPandocCompiler :: Compiler (Item Pandoc)
chaoDocPandocCompiler = do
  macros <- fmap T.pack (loadBody "math-macros.md")
  body <- getResourceBody
  let withMacros = fmap (T.unpack . prependMacros macros . T.pack) body
  myReadPandocBiblio chaoDocRead (T.pack cslFile) (T.pack bibFile) myFilter withMacros

-- | 'chaoDocPandocCompiler' rendered to an HTML string.
chaoDocCompiler :: Compiler (Item String)
chaoDocCompiler = fmap (writePandocWith chaoDocWrite) chaoDocPandocCompiler
|
||||||
|
|
||||||
|
-- | Insert (or overwrite) a metadata key on a document.
addMeta :: T.Text -> MetaValue -> Pandoc -> Pandoc
addMeta name value (Pandoc meta blocks) =
  Pandoc (Meta (M.insert name value (unMeta meta))) blocks
|
||||||
|
|
||||||
|
-- | Like Hakyll's @readPandocBiblio@, but applies @pdfilter@ to the
-- document *before* citeproc runs, so filters may synthesize
-- citation nodes of their own. The bibliography, CSL style,
-- link-citations flag and reference-section title are injected as
-- metadata on the fly.
myReadPandocBiblio ::
  ReaderOptions ->
  T.Text -> -- CSL file name
  T.Text -> -- bibliography file name
  (Pandoc -> Pandoc) -> -- filter applied before citeproc
  Item String ->
  Compiler (Item Pandoc)
myReadPandocBiblio ropt csl biblio pdfilter item = do
  pandoc <- fmap itemBody (readPandocWith ropt item)
  let withMeta =
        addMeta "bibliography" (MetaList [MetaString biblio]) $
          addMeta "csl" (MetaString csl) $
            addMeta "link-citations" (MetaBool True) $
              addMeta "reference-section-title" (MetaInlines [Str "References"]) $
                pdfilter pandoc
      -- citeproc runs in IO; on failure fall back to the raw
      -- (unfiltered) document, matching the original behaviour.
      processed =
        fromRight pandoc (unsafePerformIO (runIO (processCitations withMeta)))
  pure (fmap (const processed) item)
|
||||||
|
|
||||||
|
-- | Site-wide Pandoc filter pipeline (applied right to left):
-- display-math tags, CJK spacing, theorem environments, sidenotes.
myFilter :: Pandoc -> Pandoc
myFilter =
  usingSideNotesHTML chaoDocWrite
    . theoremFilter
    . panguFilter
    . displayMathFilter
|
||||||
|
|
||||||
|
-- Pangu filter: insert spaces at CJK/non-CJK boundaries.

-- | Last character of an inline's visible text, descending into
-- nested inline containers; 'Nothing' for empty or unhandled inlines.
lastChar :: Inline -> Maybe Char
lastChar inline = case inline of
  Str s
    | T.null s -> Nothing
    | otherwise -> Just (T.last s)
  Emph is -> deepest is
  Strong is -> deepest is
  Strikeout is -> deepest is
  Link _ is _ -> deepest is
  Span _ is -> deepest is
  Quoted _ is -> deepest is
  _ -> Nothing
  where
    deepest [] = Nothing
    deepest is = lastChar (last is)

-- | First character of an inline's visible text, descending into
-- nested inline containers; 'Nothing' for empty or unhandled inlines.
firstChar :: Inline -> Maybe Char
firstChar inline = case inline of
  Str s
    | T.null s -> Nothing
    | otherwise -> Just (T.head s)
  Emph is -> shallowest is
  Strong is -> shallowest is
  Strikeout is -> shallowest is
  Link _ is _ -> shallowest is
  Span _ is -> shallowest is
  Quoted _ is -> shallowest is
  _ -> Nothing
  where
    shallowest [] = Nothing
    shallowest is = firstChar (head is)
|
||||||
|
|
||||||
|
-- | Apply pangu spacing to the text of an inline, recursing into
-- containers that hold further inlines.
panguInline :: Inline -> Inline
panguInline inline = case inline of
  Str s -> Str (pangu s)
  Emph is -> Emph (panguInlines is)
  Strong is -> Strong (panguInlines is)
  Strikeout is -> Strikeout (panguInlines is)
  Link at is tg -> Link at (panguInlines is) tg
  Span at is -> Span at (panguInlines is)
  Quoted qt is -> Quoted qt (panguInlines is)
  _ -> inline

-- | Pangu-space a run of inlines, inserting a 'Space' wherever a CJK
-- character meets a non-CJK character across an inline boundary.
panguInlines :: [Inline] -> [Inline]
panguInlines = foldr (insertSpace . panguInline) []
  where
    insertSpace x [] = [x]
    insertSpace x rest@(y : _)
      | boundaryNeedsSpace x y = x : Space : rest
      | otherwise = x : rest
    boundaryNeedsSpace x y =
      case (lastChar x, firstChar y) of
        (Just lc, Just fc) -> isCJK lc /= isCJK fc
        _ -> False
|
||||||
|
|
||||||
|
-- | Apply pangu spacing to every paragraph. Only 'Para' blocks are
-- touched, matching the original behaviour.
panguFilter :: Pandoc -> Pandoc
panguFilter = walk fixPara
  where
    fixPara :: Block -> Block
    fixPara (Para is) = Para (panguInlines is)
    fixPara b = b
|
||||||
|
|
||||||
|
-- | A trailing @\tag{...}@ / @\tag*{...}@ extracted from display math.
type MathTag :: Type
data MathTag = MathTag
  { -- | True for the starred form @\tag*{...}@ (rendered without parentheses).
    mathTagStarred :: Bool,
    -- | The tag text between the braces (whitespace-stripped by 'extractMathTag').
    mathTagBody :: Text
  }
|
||||||
|
|
||||||
|
-- | Split a display-math source into its body and a trailing tag, if a
-- @\tag{...}@ / @\tag*{...}@ ends the string (modulo whitespace).
extractMathTag :: Text -> Maybe (Text, MathTag)
extractMathTag source =
  case findTrailingMathTag (T.unpack source) of
    Nothing -> Nothing
    Just (mathBody, starred, tagBody) ->
      Just
        ( T.stripEnd (T.pack mathBody)
        , MathTag starred (T.strip (T.pack tagBody))
        )
|
||||||
|
|
||||||
|
-- | Scan for the last @\tag{...}@ / @\tag*{...}@ that is followed only
-- by whitespace; return (text before the tag, starred?, tag body).
findTrailingMathTag :: String -> Maybe (String, Bool, String)
findTrailingMathTag input = search 0 Nothing input
  where
    search :: Int -> Maybe (String, Bool, String) -> String -> Maybe (String, Bool, String)
    search _ found [] = found
    search offset found rest@(_ : more) = search (offset + 1) found' more
      where
        -- A later match overwrites an earlier one, so the rightmost
        -- trailing tag wins.
        found' = case parseMathTagPrefix rest of
          Just (starred, tagBody, suffix)
            | all isSpace suffix -> Just (take offset input, starred, tagBody)
          _ -> found
|
||||||
|
|
||||||
|
-- | Parse a @\tag{...}@ or @\tag*{...}@ at the very start of the input,
-- returning (starred?, brace contents, remaining input).
parseMathTagPrefix :: String -> Maybe (Bool, String, String)
parseMathTagPrefix s
  | Just rest <- stripPrefix "\\tag*{" s = finish True rest
  | Just rest <- stripPrefix "\\tag{" s = finish False rest
  | otherwise = Nothing
  where
    -- The opening '{' is already consumed, hence the initial depth of 1.
    finish starred rest = do
      (tagBody, suffix) <- parseBalancedBraces 1 [] rest
      pure (starred, tagBody, suffix)
|
||||||
|
|
||||||
|
-- | Consume input up to the '}' that closes the brace group opened at
-- the given depth. Backslash-escaped braces and backslashes are kept
-- verbatim (escape included) and do not affect the depth.
-- Returns (group contents, rest after the closing brace); Nothing if
-- the input ends before the group closes.
parseBalancedBraces :: Int -> String -> String -> Maybe (String, String)
parseBalancedBraces depth collected input = case input of
  [] -> Nothing
  '\\' : c : rest
    | c `elem` ['{', '}', '\\'] -> parseBalancedBraces depth (c : '\\' : collected) rest
  '{' : rest -> parseBalancedBraces (depth + 1) ('{' : collected) rest
  '}' : rest
    | depth == 1 -> Just (reverse collected, rest)
    | otherwise -> parseBalancedBraces (depth - 1) ('}' : collected) rest
  c : rest -> parseBalancedBraces depth (c : collected) rest
|
||||||
|
|
||||||
|
-- | True when the tag text is simple enough to render as plain text
-- (alphanumerics plus a small punctuation set) instead of inline math.
isPlainMathTag :: Text -> Bool
isPlainMathTag = T.all plainChar
  where
    plainChar :: Char -> Bool
    plainChar c = isAlphaNum c || c `elem` ("*-.:()[] " :: String)
|
||||||
|
|
||||||
|
-- | Render a 'MathTag' as inlines: a plain tag becomes a 'Str', anything
-- else is re-typeset as inline math. Unstarred tags are wrapped in
-- parentheses.
mathTagInlines :: MathTag -> [Inline]
mathTagInlines tag
  | mathTagStarred tag = rendered
  | otherwise = Str "(" : rendered ++ [Str ")"]
  where
    txt = mathTagBody tag
    rendered
      | isPlainMathTag txt = [Str txt]
      | otherwise = [Math InlineMath txt]
|
||||||
|
|
||||||
|
-- display math wrapper for MathML
-- | Wrap every display-math element in a @math-container@ span. When the
-- source ends in a \tag{...}, the tag is split out into its own
-- @math-tag@ span next to a @math-equation@ span holding the tagless
-- math (the empty @math-tag-spacer@ span is presumably a CSS layout
-- hook — confirm against the stylesheet).
displayMathFilter :: Pandoc -> Pandoc
displayMathFilter = walk wrapDisplayMath
  where
    wrapDisplayMath (Math DisplayMath source)
      | Just (mathBody, tag) <- extractMathTag source =
          Span
            ("", ["math-container", "math-container-tagged"], [])
            [ Span ("", ["math-tag-spacer"], []) [],
              Span ("", ["math-equation"], []) [Math DisplayMath mathBody],
              Span ("", ["math-tag"], []) (mathTagInlines tag)
            ]
      | otherwise =
          Span ("", ["math-container"], []) [Math DisplayMath source]
    wrapDisplayMath x = x
|
||||||
+227
@@ -0,0 +1,227 @@
|
|||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
|
||||||
|
module Pangu (pangu, isCJK) where
|
||||||
|
|
||||||
|
import Data.Function (fix)
|
||||||
|
import Data.Text (Text)
|
||||||
|
import qualified Data.Text as T
|
||||||
|
import Data.Void (Void)
|
||||||
|
import Replace.Megaparsec (streamEdit)
|
||||||
|
import Text.Megaparsec
|
||||||
|
import Text.Megaparsec.Char
|
||||||
|
|
||||||
|
-------------------------------------------------------------------------------
|
||||||
|
type Parser = Parsec Void Text
|
||||||
|
|
||||||
|
type Rule = Parser Text
|
||||||
|
|
||||||
|
type RuleSet = [Rule]
|
||||||
|
|
||||||
|
-- | Repeatedly run one rule over the whole text until a pass makes no
-- further change (a fixed point is reached).
applyUntilFixed :: Rule -> Text -> Text
applyUntilFixed rule = go
  where
    go current
      | edited == current = current
      | otherwise = go edited
      where
        edited = streamEdit (try rule) id current
|
||||||
|
|
||||||
|
-- | Apply each rule (in list order) to its fixed point before moving on
-- to the next rule.
applyRulesRecursively :: RuleSet -> Text -> Text
applyRulesRecursively rules input =
  foldl (\acc rule -> applyUntilFixed rule acc) input rules
|
||||||
|
|
||||||
|
-- | Apply each rule (in list order) exactly once over the whole text.
applyRules :: RuleSet -> Text -> Text
applyRules rules input = foldl step input rules
  where
    step acc rule = streamEdit (try rule) id acc
|
||||||
|
|
||||||
|
-------------------------------------------------------------------------------
|
||||||
|
-- rules for pangu
|
||||||
|
|
||||||
|
-- alphaNumChar from megaparsec matches CJK chars...
|
||||||
|
-- need to implement a new one
|
||||||
|
-- | Match a single ASCII letter or digit. ASCII-only on purpose:
-- megaparsec's alphaNumChar also accepts CJK characters.
alphanumericChar :: Parser Char
alphanumericChar = satisfy isAsciiAlnum
  where
    isAsciiAlnum c =
      inRange 'a' 'z' c || inRange 'A' 'Z' c || inRange '0' '9' c
    inRange lo hi c = lo <= c && c <= hi
|
||||||
|
|
||||||
|
-- | Check if a character falls within the CJK ranges provided.
isCJK :: Char -> Bool
isCJK c = any covers cjkRanges
  where
    covers (lo, hi) = lo <= c && c <= hi
    -- Inclusive code-point ranges treated as CJK by the spacing rules.
    cjkRanges =
      [ ('\x2e80', '\x2eff')
      , ('\x2f00', '\x2fdf')
      , ('\x3040', '\x309f')
      , ('\x30a0', '\x30fa')
      , ('\x30fc', '\x30ff')
      , ('\x3100', '\x312f')
      , ('\x3200', '\x32ff')
      , ('\x3400', '\x4dbf')
      , ('\x4e00', '\x9fff')
      , ('\xf900', '\xfaff')
      ]
|
||||||
|
|
||||||
|
-- | Map a halfwidth punctuation character to its fullwidth (CJK)
-- counterpart; any other character is returned unchanged.
convertToFullwidth :: Char -> Char
convertToFullwidth c = maybe c id (lookup c table)
  where
    table =
      [ (':', '：')
      , ('.', '。')
      , ('~', '～')
      , ('!', '！')
      , ('?', '？')
      , (',', '，')
      , (';', '；')
      , ('\"', '”')
      , ('\'', '’')
      ]
|
||||||
|
|
||||||
|
-- | A parser that matches a single CJK character (see 'isCJK').
cjkChar :: Parser Char
cjkChar = satisfy isCJK
|
||||||
|
|
||||||
|
-- use python.py as reference for these rules
|
||||||
|
|
||||||
|
-- | CJK, then ':' run or a single '.', then CJK: convert the symbol to
-- fullwidth and drop any spaces around it (e.g. @中 : 文@ -> @中：文@).
fullwidthCJKsymCJK :: Rule
fullwidthCJKsymCJK = do
  lcjk <- cjkChar
  _ <- many (char ' ')
  -- One or more colons, or exactly one dot.
  sym <- try (some (char ':')) <|> count 1 (char '.')
  _ <- many (char ' ')
  rcjk <- cjkChar
  let transformedsym = map convertToFullwidth sym
  return $ T.pack $ [lcjk] ++ transformedsym ++ [rcjk]
|
||||||
|
|
||||||
|
-- | CJK followed by a run of ~!?,; (optionally space-padded): make the
-- symbols fullwidth and drop the surrounding spaces. Whatever follows
-- the symbols is not consumed.
fullwidthCJKsym :: Rule
fullwidthCJKsym = do
  cjk <- cjkChar
  _ <- many (char ' ')
  sym <- some $ oneOf ("~!?,;" :: [Char])
  _ <- many (char ' ')
  let transformedsym = T.pack $ map convertToFullwidth sym
  return $ T.pack [cjk] <> transformedsym
|
||||||
|
|
||||||
|
-- | Insert a space between an ellipsis ("..." or "…") and a following
-- CJK character.
dotsCJK :: Rule
dotsCJK = do
  ellipsis <- chunk "..." <|> chunk "…"
  cjk <- cjkChar
  pure $ ellipsis <> T.pack [' ', cjk]
|
||||||
|
|
||||||
|
-- | Keep a halfwidth colon between a CJK character and an ASCII
-- alphanumeric (so it is not widened by the other rules).
fixCJKcolAN :: Rule
fixCJKcolAN = do
  cjk <- cjkChar
  _ <- char ':'
  an <- alphanumericChar
  pure $ T.pack [cjk, ':', an]
|
||||||
|
|
||||||
|
-- quotes
|
||||||
|
-- seems confusing ...
|
||||||
|
-- | Quote-like symbols recognized by the quote rules: ASCII single
-- quote, backtick, U+05F4, and double quote.
quotesym :: [Char]
quotesym = "'`\x05f4\""
|
||||||
|
|
||||||
|
-- | Insert a space between a CJK character and a following quote symbol.
cjkquote :: Rule
cjkquote = do
  cjk <- cjkChar
  quote <- oneOf quotesym
  pure $ T.pack [cjk, ' ', quote]
|
||||||
|
|
||||||
|
-- | Insert a space between a quote symbol and a following CJK character.
quoteCJK :: Rule
quoteCJK = do
  quote <- oneOf quotesym
  cjk <- cjkChar
  pure $ T.pack [quote, ' ', cjk]
|
||||||
|
|
||||||
|
-- | Trim whitespace just inside a quoted span: the text between an
-- opening and a closing run of quote symbols is stripped of
-- leading/trailing whitespace.
fixQuote :: Rule
fixQuote = do
  openQuotes <- T.pack <$> some (oneOf quotesym)
  _ <- many spaceChar
  -- Consume up to (but not including) the next quote run.
  content <- T.pack <$> someTill anySingle (lookAhead $ some (oneOf quotesym))
  closeQuotes <- T.pack <$> some (oneOf quotesym)
  return $ openQuotes <> T.strip content <> closeQuotes
|
||||||
|
|
||||||
|
-- | Insert a space between a CJK character and an apostrophe, but only
-- when the apostrophe is not followed by 's'.
cjkpossessivequote :: Rule
cjkpossessivequote = do
  cjk <- cjkChar
  _ <- char '\''
  -- lookAhead needs a following character, so this rule cannot fire at
  -- end of input.
  _ <- lookAhead $ anySingleBut 's'
  return $ T.pack $ cjk : " '"
|
||||||
|
|
||||||
|
-- This singlequoteCJK rule will turn '你好' into ' 你好'
|
||||||
|
-- which seems not desirable...
|
||||||
|
-- however, the behavior is aligned with python version
|
||||||
|
-- | Insert a space between an apostrophe and a following CJK character.
singlequoteCJK :: Rule
singlequoteCJK = do
  _ <- char '\''
  cjk <- cjkChar
  pure $ T.pack ['\'', ' ', cjk]
|
||||||
|
|
||||||
|
-- | Remove the space(s) wrongly separating a word from its possessive
-- @'s@ (e.g. @Bob 's@ -> @Bob's@).
fixPossessivequote :: Rule
fixPossessivequote = do
  owner <- cjkChar <|> alphanumericChar
  _ <- some spaceChar
  _ <- chunk "'s"
  pure $ T.pack (owner : "'s")
|
||||||
|
|
||||||
|
-- hash
|
||||||
|
-- | Pad a CJK hashtag of the form @#标签#@ with spaces when it sits
-- between CJK text on both sides.
hashANSCJKhash :: Rule
hashANSCJKhash = do
  cjk1 <- cjkChar
  _ <- char '#'
  mid <- some cjkChar
  _ <- char '#'
  cjk2 <- cjkChar
  return $ T.pack $ [cjk1] ++ " #" ++ mid ++ "# " ++ [cjk2]
|
||||||
|
|
||||||
|
-- | Insert a space between CJK text and a following @#@ that starts a
-- hashtag (i.e. anything other than a space follows the '#').
cjkhash :: Rule
cjkhash = do
  cjk <- cjkChar
  _ <- char '#'
  _ <- lookAhead $ anySingleBut ' '
  return $ T.pack $ cjk : " #"
|
||||||
|
|
||||||
|
-- | Insert a space between a @#@ and a following CJK character.
hashcjk :: Rule
hashcjk = do
  _ <- char '#'
  -- NOTE(review): this lookAhead appears redundant, since cjkChar below
  -- never matches ' ' anyway — confirm against the Python reference.
  _ <- lookAhead $ anySingleBut ' '
  cjk <- cjkChar
  return $ T.pack $ "# " ++ [cjk]
|
||||||
|
|
||||||
|
-- operators
|
||||||
|
-- | Space out an operator sandwiched between a CJK character and an
-- ASCII alphanumeric.
cjkOPTan :: Rule
cjkOPTan = do
  cjk <- cjkChar
  op <- oneOf ("+-=*/&|<>%" :: [Char])
  an <- alphanumericChar
  pure $ T.pack [cjk, ' ', op, ' ', an]
|
||||||
|
|
||||||
|
-- | Space out an operator sandwiched between an ASCII alphanumeric and
-- a CJK character.
anOPTcjk :: Rule
anOPTcjk = do
  an <- alphanumericChar
  op <- oneOf ("+-=*/&|<>%" :: [Char])
  cjk <- cjkChar
  pure $ T.pack [an, ' ', op, ' ', cjk]
|
||||||
|
|
||||||
|
-- slash/bracket rules are not implemented
|
||||||
|
|
||||||
|
-- CJK and alphanumeric without space
|
||||||
|
|
||||||
|
-- | Insert a space after a CJK character that is directly followed by
-- an alphanumeric or symbol character (the follower is not consumed).
cjkans :: Rule
cjkans = do
  cjk <- cjkChar
  _ <- lookAhead (alphanumericChar <|> oneOf ("@$%^&*-+\\=|/" :: [Char]))
  return $ T.pack [cjk, ' ']
|
||||||
|
|
||||||
|
-- | Insert a space after an alphanumeric or symbol character that is
-- directly followed by a CJK character (which is not consumed).
anscjk :: Rule
anscjk = do
  an <- alphanumericChar <|> oneOf ("~!$%^&*-+\\=|;:,./?" :: [Char])
  _ <- lookAhead cjkChar
  return $ T.pack [an, ' ']
|
||||||
|
|
||||||
|
-- rule set, the order matters
|
||||||
|
-- | Rules iterated to a fixed point, applied in list order.
recursiveRules :: RuleSet
recursiveRules = [fullwidthCJKsymCJK, fullwidthCJKsym]
|
||||||
|
|
||||||
|
-- | Rules applied in a single pass, in list order.
onepassRules :: RuleSet
onepassRules = [anscjk, cjkans]
|
||||||
|
|
||||||
|
-- | Apply all spacing rules to a piece of text: the fixed-point rules
-- first, then the single-pass rules.
pangu :: Text -> Text
pangu = applyRules onepassRules . applyRulesRecursively recursiveRules
|
||||||
@@ -0,0 +1,161 @@
|
|||||||
|
{-# LANGUAGE BangPatterns #-}
|
||||||
|
{-# LANGUAGE DerivingStrategies #-}
|
||||||
|
{-# LANGUAGE LambdaCase #-}
|
||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
{-# LANGUAGE ScopedTypeVariables #-}
|
||||||
|
{- |
|
||||||
|
Module : Text.Pandoc.SideNoteHTML
|
||||||
|
Description : Convert pandoc footnotes to sidenotes
|
||||||
|
Copyright : (c) Tony Zorman 2023
|
||||||
|
License : MIT
|
||||||
|
Maintainer : Tony Zorman <soliditsallgood@mailbox.org>
|
||||||
|
Stability : experimental
|
||||||
|
Portability : non-portable
|
||||||
|
-}
|
||||||
|
module SideNoteHTML (usingSideNotesHTML) where
|
||||||
|
|
||||||
|
import Control.Monad (foldM)
|
||||||
|
import Control.Monad.State (State, get, modify', runState)
|
||||||
|
import Data.Text (Text)
|
||||||
|
import Text.Pandoc (runPure, writeHtml5String)
|
||||||
|
import Text.Pandoc.Definition (Block (..), Inline (..), Pandoc (..))
|
||||||
|
import Text.Pandoc.Options (WriterOptions)
|
||||||
|
import Text.Pandoc.Shared (tshow)
|
||||||
|
import Text.Pandoc.Walk (walkM)
|
||||||
|
import qualified Data.Text as T
|
||||||
|
|
||||||
|
-- type NoteType :: Type
|
||||||
|
-- | The two note flavours: numbered sidenotes and unnumbered margin notes.
data NoteType = Sidenote | Marginnote
  deriving stock (Show, Eq)
|
||||||
|
|
||||||
|
-- type SidenoteState :: Type
|
||||||
|
-- | State threaded through the walk while rendering sidenotes.
data SidenoteState = SNS
  { -- | Writer options used to pre-render note bodies to HTML.
    _writer :: !WriterOptions
    -- | Running note counter, used for the unique @sn-<i>@ element ids.
  , counter :: !Int
  }
|
||||||
|
|
||||||
|
-- type Sidenote :: Type -> Type
|
||||||
|
-- | State monad threading 'SidenoteState' through the rendering walk.
type Sidenote = State SidenoteState
|
||||||
|
|
||||||
|
-- | Like 'Text.Pandoc.SideNote.usingSideNotes', but immediately
|
||||||
|
-- pre-render the sidenotes. This has the advantage that sidenotes may
|
||||||
|
-- be wrapped in a @<div>@ (instead of a 'Span'), which allows arbitrary
|
||||||
|
-- blocks to be nested in them. The disadvantage is that one now has to
|
||||||
|
-- specify the 'WriterOptions' for the current document, meaning this is
|
||||||
|
-- meant to be used as a module and is unlikely to be useful as a
|
||||||
|
-- standalone application.
|
||||||
|
--
|
||||||
|
-- ==== __Example__
|
||||||
|
--
|
||||||
|
-- Using this function with <https://jaspervdj.be/hakyll/ hakyll> could
|
||||||
|
-- look something like the following, defining an equivalent to the
|
||||||
|
-- default @pandocCompiler@.
|
||||||
|
--
|
||||||
|
-- > myPandocCompiler :: Compiler (Item String)
|
||||||
|
-- > myPandocCompiler =
|
||||||
|
-- > pandocCompilerWithTransformM
|
||||||
|
-- > defaultHakyllReaderOptions
|
||||||
|
-- > defaultHakyllWriterOptions
|
||||||
|
-- > (usingSideNotesHTML defaultHakyllWriterOptions)
|
||||||
|
--
|
||||||
|
usingSideNotesHTML :: WriterOptions -> Pandoc -> Pandoc
usingSideNotesHTML writer (Pandoc meta blocks) =
  -- Drop a superfluous paragraph at the start of the document.
  Pandoc meta . someStart . walkBlocks (SNS writer 0) $ blocks
  where
    -- Strip the dummy (Para [Str ""]) that mkSidenote inserts before
    -- each paragraph, when it ends up leading the document.
    someStart :: [Block] -> [Block]
    someStart = \case
      (Para [Str ""] : bs) -> bs
      bs -> bs
|
||||||
|
|
||||||
|
-- | Walk the block list left to right, threading the sidenote counter
-- state from one block to the next.
walkBlocks :: SidenoteState -> [Block] -> [Block]
walkBlocks sns = \case
  [] -> []
  (b : bs) -> b' <> walkBlocks s' bs
    where (b', s') = walkM mkSidenote [b] `runState` sns
|
||||||
|
|
||||||
|
-- Sidenotes can probably appear in more places; this should be
|
||||||
|
-- filled-in at some point.
|
||||||
|
-- | Replace every 'Note' in the given blocks with pre-rendered sidenote
-- HTML, accumulating results left to right.
mkSidenote :: [Block] -> Sidenote [Block]
mkSidenote = foldM (\acc b -> (acc <>) <$> single b) []
  where
    -- Try to find and render a sidenote in a single block.
    single :: Block -> Sidenote [Block]
    single = \case
      -- Simulate a paragraph by inserting a dummy block; this is needed
      -- in case two consecutive paragraphs have sidenotes, or a paragraph
      -- doesn't have one at all.
      Para inlines -> (Para [Str ""] :) <$> renderSidenote [] inlines
      Plain inlines -> renderSidenote [] inlines
      -- Lists are handled recursively, item by item.
      OrderedList attrs bs -> (:[]) . OrderedList attrs <$> traverse mkSidenote bs
      BulletList bs -> (:[]) . BulletList <$> traverse mkSidenote bs
      block -> pure [block]
|
||||||
|
|
||||||
|
-- | Accumulate inlines (in reverse) until a 'Note' is hit; the note body
-- is rendered to a raw HTML block, and the text around it is "glued" to
-- it via HTML comments (see [Note Comment]).
renderSidenote :: [Inline] -> [Inline] -> Sidenote [Block]
renderSidenote !inlines = \case
  [] -> pure [plain inlines]
  Note bs : xs -> do block <- go bs
                     mappend [ -- Start gluing before, see [Note Comment].
                               plain (RawInline "html" commentStart : inlines)
                             , block
                             ]
                       <$> renderSidenote
                             [RawInline "html" commentEnd] -- End gluing after
                             xs
  b : xs -> renderSidenote (b : inlines) xs
  where
    go :: [Block] -> Sidenote Block
    go blocks = do
      -- Read the current counter and bump it for the next note.
      SNS w i <- get <* modify' (\sns -> sns{ counter = 1 + counter sns })
      let (typ, noteText) = getNoteType (render w blocks)
      pure . RawBlock "html" $
        mconcat [ commentEnd -- End gluing before
                , label typ i <> input i <> note typ noteText
                , commentStart -- Start gluing after
                ]
|
||||||
|
|
||||||
|
-- The '{-}' symbol differentiates between margin note and side note.
|
||||||
|
-- | Classify a rendered note: a body starting with "{-} " is a margin
-- note (the marker is removed); anything else is a numbered sidenote.
getNoteType :: Text -> (NoteType, Text)
getNoteType t = case T.stripPrefix "{-} " t of
  Just rest -> (Marginnote, rest)
  Nothing -> (Sidenote, t)
|
||||||
|
|
||||||
|
-- | Render blocks to HTML5 text, discarding everything up to and
-- including the first newline of the output.
-- NOTE(review): presumably this strips a wrapper line pandoc emits —
-- confirm against the writer's output shape.
render :: WriterOptions -> [Block] -> Text
render w bs = case runPure (writeHtml5String w (Pandoc mempty bs)) of
  Left err -> error $ "Text.Pandoc.SideNoteHTML.writePandocWith: " ++ show err
  Right txt -> T.drop 1 (T.dropWhile (/= '\n') txt)
|
||||||
|
|
||||||
|
-- | Closing delimiter of the HTML comment used for gluing (see [Note Comment]).
commentEnd :: T.Text
commentEnd = "-->"
|
||||||
|
|
||||||
|
-- | Opening delimiter of the HTML comment used for gluing (see [Note Comment]).
commentStart :: T.Text
commentStart = "<!--"
|
||||||
|
|
||||||
|
-- | Build a 'Plain' block from inlines accumulated in reverse order.
plain :: [Inline] -> Block
plain = Plain . reverse
|
||||||
|
|
||||||
|
-- | The @<label>@ that toggles note @i@: sidenotes get a numbered
-- toggle class, margin notes show a "⊕" symbol instead.
label :: NoteType -> Int -> Text
label nt i =
  "<label for=\"sn-" <> tshow i <> "\" class=\"margin-toggle" <> numberClass <> "\">" <> symbol <> "</label>"
  where
    numberClass :: Text
    numberClass = case nt of
      Sidenote -> " sidenote-number"
      Marginnote -> ""
    symbol :: Text
    symbol = case nt of
      Sidenote -> ""
      Marginnote -> "⊕"
|
||||||
|
|
||||||
|
-- | The hidden checkbox paired (via @for="sn-i"@) with 'label' to
-- toggle note visibility.
input :: Int -> Text
input i = "<input type=\"checkbox\" id=\"sn-" <> tshow i <> "\" class=\"margin-toggle\"/>"
|
||||||
|
|
||||||
|
-- | Wrap the rendered note body in a div whose class is the lowercased
-- note type ("sidenote" or "marginnote").
note :: NoteType -> Text -> Text
note nt body = "<div class=\"" <> T.toLower (tshow nt) <> "\">" <> body <> "</div>"
|
||||||
|
|
||||||
|
{- [Note Comment]
|
||||||
|
|
||||||
|
This is obviously horrible, but we have to do this in order for the
|
||||||
|
blocks (which are now not inline elements anymore!) immediately before
|
||||||
|
and after the sidenote to be "glued" to the sidenote itself. In this
|
||||||
|
way, the number indicating the sidenote does not have an extra space
|
||||||
|
associated to it on either side, which otherwise would be the case.
|
||||||
|
|
||||||
|
-}
|
||||||
+255
@@ -0,0 +1,255 @@
|
|||||||
|
{-# LANGUAGE BlockArguments #-}
|
||||||
|
{-# LANGUAGE LambdaCase #-}
|
||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
{-# LANGUAGE ScopedTypeVariables #-}
|
||||||
|
{-# LANGUAGE ViewPatterns #-}
|
||||||
|
|
||||||
|
import ChaoDoc
|
||||||
|
import Data.List (sortOn)
|
||||||
|
import qualified Data.Text as T
|
||||||
|
import Hakyll
|
||||||
|
import System.FilePath
|
||||||
|
import Text.Pandoc
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- https://www.rohanjain.in/hakyll-clean-urls/
|
||||||
|
-- | Route "dir/page.ext" to "dir/page/index.html", enabling clean
-- "dir/page/" URLs.
cleanRoute :: Routes
cleanRoute = customRoute toIndexPath
  where
    toIndexPath ident =
      let path = toFilePath ident
       in takeDirectory path </> takeBaseName path </> "index.html"
|
||||||
|
|
||||||
|
-- | Rewrite generated links so ".../index.html" becomes ".../".
cleanIndexHtmls :: Item String -> Compiler (Item String)
cleanIndexHtmls = return . fmap (replaceAll needle (const "/"))
  where
    needle :: String
    needle = "/index.html"
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
-- | Site configuration: on top of Hakyll's defaults, also ignore any
-- path containing a ".git" directory component.
config :: Configuration
config =
  defaultConfiguration
    { ignoreFile = \path ->
        ignoreFile defaultConfiguration path
          || ".git" `elem` splitDirectories (normalise path)
    }
|
||||||
|
|
||||||
|
-- | Site entry point: register all Hakyll rules (static assets, notes,
-- the index page, templates) and run the build.
main :: IO ()
main = hakyllWith config $ do
  -- Static assets are copied through unchanged.
  match "images/**" $ do
    route idRoute
    compile copyFileCompiler

  -- Loaded as a raw body so other compilers can splice the macros in.
  match "math-macros.md" $ compile getResourceBody

  match "fonts/*.woff2" $ do
    route idRoute
    compile copyFileCompiler

  match "favicon.ico" $ do
    route idRoute
    compile copyFileCompiler

  -- match "404.html" $ do
  --   route cleanRoute
  --   compile copyFileCompiler

  match "css/*" $ do
    route idRoute
    compile compressCssCompiler

  -- match "about.md" $ do
  --   route cleanRoute
  --   compile $
  --     chaoDocCompiler
  --       >>= loadAndApplyTemplate "templates/about.html" defaultContext
  --       >>= relativizeUrls

  -- -- build up tags
  -- tags <- buildTags "posts/*" (fromCapture "tags/*.html")
  -- tagsRules tags $ \tag pattern -> do
  --   let title = "Posts tagged \"" ++ tag ++ "\""
  --   route cleanRoute
  --   compile $ do
  --     posts <- recentFirst =<< loadAll pattern
  --     let ctx =
  --           constField "title" title
  --             `mappend` listField "posts" (postCtxWithTags tags) (return posts)
  --             `mappend` defaultContext
  --     makeItem ""
  --       >>= loadAndApplyTemplate "templates/tag.html" ctx
  --       >>= loadAndApplyTemplate "templates/default.html" ctx
  --       >>= relativizeUrls

  -- create ["tags.html"] $ do
  --   route cleanRoute
  --   compile $ do
  --     makeItem ""
  --       >>= loadAndApplyTemplate "templates/tags.html" (defaultCtxWithTags tags)
  --       >>= loadAndApplyTemplate "templates/default.html" (defaultCtxWithTags tags)

  -- match "posts/*" $ do
  --   route cleanRoute
  --   compile $ do
  --     tocCtx <- getTocCtx (postCtxWithTags tags)
  --     chaoDocCompiler
  --       >>= loadAndApplyTemplate "templates/post.html" tocCtx
  --       >>= loadAndApplyTemplate "templates/default.html" tocCtx
  --       >>= relativizeUrls
  --       -- >>= katexFilter

  -- Notes: compiled through the ChaoDoc pipeline with a per-page TOC
  -- context, rendered into the note template.
  match "notes/*" $ do
    route cleanRoute
    compile $ do
      tocCtx <- getTocCtx defaultContext
      chaoDocCompiler
        >>= loadAndApplyTemplate "templates/note.html" tocCtx
        >>= relativizeUrls

  -- The index lists all notes, ordered by file path.
  create ["index.html"] $ do
    route idRoute
    compile $ do
      notes <- sortOn (toFilePath . itemIdentifier) <$> loadAll "notes/*"
      let notesCtx =
            listField "posts" defaultContext (return notes)
              `mappend` constField "title" "Notes"
              `mappend` defaultContext
      makeItem ""
        >>= loadAndApplyTemplate "templates/notes.html" notesCtx
        >>= loadAndApplyTemplate "templates/index.html" notesCtx
        >>= relativizeUrls
        >>= cleanIndexHtmls

  -- create ["archive.html"] $ do
  --   route cleanRoute
  --   compile $ do
  --     posts <- recentFirst =<< loadAll "posts/*"
  --     let archiveCtx =
  --           listField "posts" postCtx (return posts)
  --             `mappend` constField "title" "Archives"
  --             `mappend` defaultContext
  --     makeItem ""
  --       >>= loadAndApplyTemplate "templates/archive.html" archiveCtx
  --       >>= loadAndApplyTemplate "templates/index.html" archiveCtx
  --       >>= relativizeUrls
  --       >>= cleanIndexHtmls

  -- create ["draft.html"] $ do
  --   route cleanRoute
  --   compile $ do
  --     posts <- recentFirst =<< loadAll "posts/*"
  --     let draftCtx =
  --           listField "posts" postCtx (return posts)
  --             `mappend` constField "title" "Drafts"
  --             `mappend` defaultContext
  --     makeItem ""
  --       >>= loadAndApplyTemplate "templates/draft.html" draftCtx
  --       >>= loadAndApplyTemplate "templates/index.html" draftCtx
  --       >>= relativizeUrls
  --       >>= cleanIndexHtmls

  -- match "index.html" $ do
  --   route idRoute
  --   compile $ do
  --     posts <- fmap (take 25) . recentFirst =<< loadAll "posts/*"
  --     let indexCtx =
  --           listField "posts" postCtx (return posts)
  --             `mappend` defaultContext
  --     getResourceBody
  --       >>= applyAsTemplate indexCtx
  --       >>= loadAndApplyTemplate "templates/index.html" indexCtx
  --       >>= relativizeUrls
  --       >>= cleanIndexHtmls

  match "templates/*" $ compile templateBodyCompiler
|
||||||
|
|
||||||
|
-- https://robertwpearce.com/hakyll-pt-2-generating-a-sitemap-xml-file.html
|
||||||
|
-- create ["sitemap.xml"] $ do
|
||||||
|
-- route idRoute
|
||||||
|
-- compile $ do
|
||||||
|
-- posts <- recentFirst =<< loadAll "posts/*"
|
||||||
|
-- singlePages <- loadAll (fromList ["about.md"])
|
||||||
|
-- let pages = posts <> singlePages
|
||||||
|
-- sitemapCtx =
|
||||||
|
-- constField "root" root
|
||||||
|
-- <> listField "pages" postCtx (return pages) -- here
|
||||||
|
-- makeItem ""
|
||||||
|
-- >>= loadAndApplyTemplate "templates/sitemap.xml" sitemapCtx
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
-- isZhField :: Context String
|
||||||
|
-- isZhField = boolFieldM "isZh" isZh
|
||||||
|
-- where
|
||||||
|
-- isZh :: Item String -> Compiler Bool
|
||||||
|
-- isZh item = do
|
||||||
|
-- maybeLang <- getMetadataField (itemIdentifier item) "lang"
|
||||||
|
-- return (maybeLang == Just "zh")
|
||||||
|
|
||||||
|
-- postCtx :: Context String
|
||||||
|
-- postCtx =
|
||||||
|
-- dateField "date" "%B %e, %Y"
|
||||||
|
-- <> dateField "date" "%Y-%m-%d"
|
||||||
|
-- <> isZhField
|
||||||
|
-- <> defaultContext
|
||||||
|
|
||||||
|
-- postCtxWithTags :: Tags -> Context String
|
||||||
|
-- postCtxWithTags tags = tagsField "tags" tags `mappend` postCtx
|
||||||
|
|
||||||
|
-- defaultCtxWithTags :: Tags -> Context String
|
||||||
|
-- defaultCtxWithTags tags = listField "tags" tagsCtx getAllTags <> defaultContext
|
||||||
|
-- where
|
||||||
|
-- getAllTags :: Compiler [Item (String, [Identifier])]
|
||||||
|
-- getAllTags = pure . map mkItem $ tagsMap tags
|
||||||
|
-- where
|
||||||
|
-- mkItem :: (String, [Identifier]) -> Item (String, [Identifier])
|
||||||
|
-- mkItem x@(t, _) = Item (tagsMakeId tags t) x
|
||||||
|
-- tagsCtx =
|
||||||
|
-- listFieldWith "posts" (postCtxWithTags tags) getPosts
|
||||||
|
-- <> metadataField
|
||||||
|
-- <> urlField "url"
|
||||||
|
-- <> pathField "path"
|
||||||
|
-- <> titleField "title"
|
||||||
|
-- <> missingField
|
||||||
|
-- where
|
||||||
|
-- getPosts ::
|
||||||
|
-- Item (String, [Identifier]) ->
|
||||||
|
-- Compiler [Item String]
|
||||||
|
-- getPosts (itemBody -> (_, is)) = mapM load is
|
||||||
|
|
||||||
|
-- toc from https://github.com/slotThe/slotThe.github.io
|
||||||
|
-- | Extend a context with a "toc" field containing the pre-rendered
-- table of contents, plus a "no-toc" flag when the page's metadata asks
-- for the TOC to be suppressed.
getTocCtx :: Context a -> Compiler (Context a)
getTocCtx ctx = do
  -- "no-toc: true" in the page metadata disables the TOC.
  noToc <- (Just "true" ==) <$> (getUnderlying >>= (`getMetadataField` "no-toc"))
  writerOpts <- mkTocWriter defaultHakyllWriterOptions
  toc <- writePandocWith writerOpts <$> chaoDocPandocCompiler
  pure $
    mconcat
      [ ctx,
        constField "toc" $ killLinkIds (itemBody toc),
        if noToc then boolField "no-toc" (pure noToc) else mempty
      ]
  where
    -- A writer whose template renders only "$toc$", so the output is
    -- just the table of contents.
    mkTocWriter :: WriterOptions -> Compiler WriterOptions
    mkTocWriter writerOpts = do
      tmpl <- either (const Nothing) Just <$> unsafeCompiler (compileTemplate "" "$toc$")
      pure $
        writerOpts
          { writerTableOfContents = True,
            writerTOCDepth = 2,
            writerTemplate = tmpl,
            writerHTMLMathMethod = MathML
          }
|
||||||
|
|
||||||
|
-- | Lift a Text transformation to operate on String.
asTxt :: (T.Text -> T.Text) -> String -> String
asTxt f s = T.unpack (f (T.pack s))
|
||||||
|
|
||||||
|
-- | Remove every @id="toc-…"@ attribute from the rendered TOC HTML.
killLinkIds :: String -> String
killLinkIds = asTxt (mconcat . go . T.splitOn "id=\"toc-")
  where
    go :: [T.Text] -> [T.Text]
    go = \case
      [] -> []
      -- Every piece after the first begins inside an attribute value;
      -- drop up to and including its closing quote.
      x : xs -> x : map (T.drop 1 . T.dropWhile (/= '\"')) xs
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
<!-- Shared <head> partial: page metadata, title, and site stylesheets. -->
<meta charset="utf-8" />
<meta http-equiv="x-ua-compatible" content="ie=edge" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="googlebot" content="noindex" />
<title>$title$</title>
<link rel="stylesheet" href="/css/fonts.css" />
<link rel="stylesheet" href="/css/default.css" />
<link rel="stylesheet" href="/css/pygentize.css" />
<link rel="stylesheet" href="/css/chao-theorems.css" />
<link rel="stylesheet" href="/css/sidenotes.css" />
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
<!-- Page shell used for the site index: inline head, navbar partial,
     and a script that appends the hostname to the document title. -->
<!doctype html>
<html lang="en">

<head>
  <meta charset="utf-8">
  <meta http-equiv="x-ua-compatible" content="ie=edge">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <meta name="googlebot" content="noindex">
  <title></title>
  <link rel="stylesheet" href="/css/fonts.css" />
  <link rel="stylesheet" href="/css/default.css" />
  <link rel="stylesheet" href="/css/pygentize.css" />
  <link rel="stylesheet" href="/css/chao-theorems.css">
  <script>
    // page title
    // NOTE(review): document.title is empty at this point, so the page
    // title ends up being just the hostname — confirm this is intended.
    document.addEventListener("DOMContentLoaded", function () {
      const hostname = window.location.hostname;
      document.title = document.title + hostname;
    });
  </script>

</head>

<body>
  <!-- Empty TOC column — presumably kept for layout parity with note pages. -->
  <div class="toc"></div>
  <div class="text-space">
    $partial("templates/navbar.html")$
    <main role="main">
      $body$
    </main>
  </div>
</body>

</html>
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
<!--<header class="no-print">
|
||||||
|
<nav class="navbar">
|
||||||
|
<a href="/">Home</a>
|
||||||
|
<div class="navright">
|
||||||
|
<a href="/draft">Drafts</a>
|
||||||
|
<a href="/about">About</a>
|
||||||
|
</div>
|
||||||
|
</nav>
|
||||||
|
</header>-->
|
||||||
@@ -0,0 +1,41 @@
|
|||||||
|
<!-- Note page template: left-hand TOC column, title/subtitle/body, footer. -->
<!doctype html>
<html lang="en">

<head>
  $partial("templates/head.html")$
</head>

<body $if(isZh)$class="lang-zh" $endif$>
  <div class="toc">
    <!-- A table of contents on the left side, but only for screens
    that are big enough -->
    <div id="contents-big">
      <!-- NOTE(review): id="up-arrow" is used on two elements below;
           HTML ids must be unique — consider using a class instead. -->
      <p class="mini-header">Notes <a id="up-arrow" href="/">←</a></p>
      <p class="mini-header">Contents <a id="up-arrow" href="#">↑</a></p>
      $toc$
    </div>
  </div>
  <div class="text-space">
    <main role="main">
      <h1 class="pagetitle">$title$</h1>
      <article>
        <section class="subtitle">
          $if(subtitle)$
          $subtitle$
          $endif$
        </section>
        <section class="body">
          $body$
        </section>
      </article>
    </main>
    <footer class="no-print">
      <!--Site proudly generated by
      <a href="http://jaspervdj.be/hakyll">Hakyll</a>.
      <a href="https://github.com/congyu711/Hakyllsite">Source</a> on Github.
      License <a href="https://creativecommons.org/licenses/by/4.0/">CC BY 4.0 </a>-->
    </footer>
  </div>
</body>

</html>
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
<!-- Note list partial: one link per item in $posts$. -->
<h1 class="pagetitle">$title$</h1>
<ul>
  $for(posts)$
  <li>
    <a href="$url$">$title$</a>
  </li>
  $endfor$
</ul>
|
||||||
Reference in New Issue
Block a user